def reproject_region(region, from_proj, to_proj):
    region = region.copy()
    proj_input = '{east} {north}\n{west} {south}'.format(**region)
    proc = gs.start_command('m.proj', input='-', separator=' , ',
                            proj_in=from_proj, proj_out=to_proj,
                            stdin=gs.PIPE, stdout=gs.PIPE, stderr=gs.PIPE)
    proc.stdin.write(gs.encode(proj_input))
    proc.stdin.close()
    proc.stdin = None
    proj_output, stderr = proc.communicate()
    if proc.returncode:
        raise RuntimeError("reprojecting region: m.proj error: " + stderr)
    enws = gs.decode(proj_output).split(os.linesep)
    elon, nlat, unused = enws[0].split(' ')
    wlon, slat, unused = enws[1].split(' ')
    region['east'] = elon
    region['north'] = nlat
    region['west'] = wlon
    region['south'] = slat
    return region
def copy_colors(fh, map, offset):
    p = gscript.pipe_command('r.colors.out', map=map)
    for line in p.stdout:
        f = gscript.decode(line).rstrip('\r\n').split(' ')
        if offset:
            if f[0] in ['nv', 'default']:
                continue
            f[0] = str(float(f[0]) + offset)
        fh.write(gscript.encode(' '.join(f) + '\n'))
    p.wait()
def copy_colors(fh, map, offset): p = gscript.pipe_command("r.colors.out", map=map) for line in p.stdout: f = gscript.decode(line).rstrip("\r\n").split(" ") if offset: if f[0] in ["nv", "default"]: continue f[0] = str(float(f[0]) + offset) fh.write(gscript.encode(" ".join(f) + "\n")) p.wait()
def _stringListToCharArr(str_list):
    arr = c_char_p * len(str_list)
    char_arr = arr()
    for i, st in enumerate(str_list):
        if st:
            char_arr[i] = encode(st)
        else:
            char_arr[i] = None
    return char_arr
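# Illustrative usage sketch for _stringListToCharArr() above (not part of the
# original snippet): build a ctypes array of C strings that can be handed to a
# C function expecting char **. It assumes `c_char_p` comes from ctypes and
# `encode` from grass.script.utils, as the helper itself requires.
from ctypes import c_char_p
from grass.script.utils import encode

names = ["elevation", "slope", None]       # hypothetical map names
char_arr = _stringListToCharArr(names)     # ctypes array of c_char_p
# char_arr[2] is None and can serve as a NULL entry on the C side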
def proj_to_wgs84(region):
    proj_in = '{east} {north}\n{west} {south}'.format(**region)
    proc = gs.start_command('m.proj', input='-', separator=' , ', flags='od',
                            stdin=gs.PIPE, stdout=gs.PIPE, stderr=gs.PIPE)
    proc.stdin.write(gs.encode(proj_in))
    proc.stdin.close()
    proc.stdin = None
    proj_out, errors = proc.communicate()
    if proc.returncode:
        raise RuntimeError("m.proj error: %s" % errors)
    enws = gs.decode(proj_out).split(os.linesep)
    elon, nlat, unused = enws[0].split(' ')
    wlon, slat, unused = enws[1].split(' ')
    return {'east': elon, 'north': nlat, 'west': wlon, 'south': slat}
def reproject_region(region, from_proj, to_proj): """Reproject boundary of region from one projection to another. :param dict region: region to reproject as a dictionary with long key names output of get_region :param str from_proj: PROJ.4 string of region; output of get_location_proj_string :param str in_proj: PROJ.4 string of target location; output of get_location_proj_string :return dict region: reprojected region as a dictionary with long key names """ region = region.copy() proj_input = ( f"{region['east']} {region['north']}\n{region['west']} {region['south']}" ) proc = gs.start_command( "m.proj", input="-", separator=" , ", proj_in=from_proj, proj_out=to_proj, flags="d", stdin=gs.PIPE, stdout=gs.PIPE, stderr=gs.PIPE, ) proc.stdin.write(gs.encode(proj_input)) proc.stdin.close() proc.stdin = None proj_output, stderr = proc.communicate() if proc.returncode: raise RuntimeError( _("Encountered error while running m.proj: {}").format(stderr)) enws = gs.decode(proj_output).split(os.linesep) elon, nlat, unused = enws[0].split(" ") wlon, slat, unused = enws[1].split(" ") region["east"] = elon region["north"] = nlat region["west"] = wlon region["south"] = slat return region
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]

    # Make sure the temporal database exists
    tgis.init()

    mapset = grass.encode(grass.gisenv()["MAPSET"])

    sp = tgis.open_old_stds(input, "strds")

    grass.use_temp_region()

    maps = sp.get_registered_maps_as_objects_by_granularity()
    num_maps = len(maps)

    # get datatype of the first map
    if maps:
        maps[0][0].select()
        datatype = maps[0][0].metadata.get_datatype()
    else:
        datatype = None

    # Get the granularity and set bottom, top and top-bottom resolution
    granularity = sp.get_granularity()

    # This is the reference time to scale the z coordinate
    reftime = datetime(1900, 1, 1)

    # We set top and bottom according to the start time in relation
    # to the date 1900-01-01 00:00:00.
    # In case of days, hours, minutes and seconds, a double number
    # is used to represent days and fractions of a day.

    # Space time voxel cubes with monthly or yearly granularity cannot be
    # mixed with other temporal units.

    # Compatible temporal units are: days, hours, minutes and seconds.
    # Incompatible are years and months.
    start, end = sp.get_temporal_extent_as_tuple()

    if sp.is_time_absolute():
        unit = granularity.split(" ")[1]
        granularity = float(granularity.split(" ")[0])

        print("Gran from stds %0.15f" % (granularity))

        if unit == "years" or unit == "year":
            bottom = float(start.year - 1900)
            top = float(granularity * num_maps)
        elif unit == "months" or unit == "month":
            bottom = float((start.year - 1900) * 12 + start.month)
            top = float(granularity * num_maps)
        else:
            bottom = float(tgis.time_delta_to_relative_time(start - reftime))
            days = 0.0
            hours = 0.0
            minutes = 0.0
            seconds = 0.0
            if unit == "days" or unit == "day":
                days = float(granularity)
            if unit == "hours" or unit == "hour":
                hours = float(granularity)
            if unit == "minutes" or unit == "minute":
                minutes = float(granularity)
            if unit == "seconds" or unit == "second":
                seconds = float(granularity)

            granularity = float(days + hours / 24.0 + minutes / 1440.0 +
                                seconds / 86400.0)
    else:
        unit = sp.get_relative_time_unit()
        bottom = start

    top = float(bottom + granularity * float(num_maps))

    try:
        grass.run_command("g.region", t=top, b=bottom, tbres=granularity)
    except CalledModuleError:
        grass.fatal(_("Unable to set 3D region"))

    # Create a NULL map to fill the gaps
    null_map = "temporary_null_map_%i" % os.getpid()
    if datatype == 'DCELL':
        grass.mapcalc("%s = double(null())" % (null_map))
    elif datatype == 'FCELL':
        grass.mapcalc("%s = float(null())" % (null_map))
    else:
        grass.mapcalc("%s = null()" % (null_map))

    if maps:
        count = 0
        map_names = ""
        for map in maps:
            # Use the first map
            id = map[0].get_id()

            # None ids will be replaced by NULL maps
            if id is None:
                id = null_map

            if count == 0:
                map_names = id
            else:
                map_names += ",%s" % id

            count += 1

        try:
            grass.run_command("r.to.rast3", input=map_names, output=output,
                              overwrite=grass.overwrite())
        except CalledModuleError:
            grass.fatal(_("Unable to create 3D raster map <%s>" % output))

    grass.run_command("g.remove", flags='f', type='raster', name=null_map)

    title = _("Space time voxel cube")
    descr = _("This space time voxel cube was created with t.rast.to.rast3")

    # Set the unit
    try:
        grass.run_command("r3.support", map=output, vunit=unit, title=title,
                          description=descr, overwrite=grass.overwrite())
    except CalledModuleError:
        grass.warning(_("%s failed to set units.") % 'r3.support')

    # Register the space time voxel cube in the temporal GIS
    if output.find("@") >= 0:
        id = output
    else:
        id = output + "@" + mapset

    start, end = sp.get_temporal_extent_as_tuple()

    r3ds = tgis.Raster3DDataset(id)

    if r3ds.is_in_db():
        r3ds.select()
        r3ds.delete()

    r3ds = tgis.Raster3DDataset(id)
    r3ds.load()

    if sp.is_time_absolute():
        r3ds.set_absolute_time(start, end)
    else:
        r3ds.set_relative_time(start, end, sp.get_relative_time_unit())

    r3ds.insert()
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    # Get the current mapset to create the id of the space time dataset
    mapset = grass.encode(grass.gisenv()["MAPSET"])

    if input.find("@") >= 0:
        old_id = input
    else:
        old_id = input + "@" + mapset

    if output.find("@") >= 0:
        new_id = output
    else:
        new_id = output + "@" + mapset

    # Do not overwrite yourself
    if new_id == old_id:
        return

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds = tgis.dataset_factory(type, old_id)

    if new_id.split("@")[1] != mapset:
        grass.fatal(_("Space time %s dataset <%s> can not be renamed. "
                      "Mapset of the new identifier differs from the current "
                      "mapset.") % (stds.get_new_map_instance(None).get_type(), old_id))

    if stds.is_in_db(dbif=dbif) == False:
        dbif.close()
        grass.fatal(_("Space time %s dataset <%s> not found") %
                    (stds.get_new_map_instance(None).get_type(), old_id))

    # Check if the new id is in the database
    new_stds = tgis.dataset_factory(type, new_id)

    if new_stds.is_in_db(dbif=dbif) == True and grass.overwrite() == False:
        dbif.close()
        grass.fatal(_("Unable to rename Space time %s dataset <%s>. Name <%s> "
                      "is in use, please use the overwrite flag.") %
                    (stds.get_new_map_instance(None).get_type(), old_id, new_id))

    # Remove an already existing space time dataset
    if new_stds.is_in_db(dbif=dbif) == True:
        new_stds.delete(dbif=dbif)

    stds.select(dbif=dbif)
    stds.rename(ident=new_id, dbif=dbif)
    stds.update_command_string(dbif=dbif)
def save_map(self):
    p = grass.feed_command('r.in.ascii', input='-', output=self.tempmap,
                           quiet=True, overwrite=True)
    outf = p.stdin
    outf.write(grass.encode("north: %f\n" % self.wind['n']))
    outf.write(grass.encode("south: %f\n" % self.wind['s']))
    outf.write(grass.encode("east: %f\n" % self.wind['e']))
    outf.write(grass.encode("west: %f\n" % self.wind['w']))
    outf.write(grass.encode("rows: %d\n" % self.wind['rows']))
    outf.write(grass.encode("cols: %d\n" % self.wind['cols']))
    outf.write(grass.encode("null: *\n"))
    for row in range(self.wind['rows']):
        for col in range(self.wind['cols']):
            if col > 0:
                outf.write(grass.encode(" "))
            val = self.values[row][col]
            if val and self.changed[row][col]:
                outf.write(grass.encode("%s" % val))
            else:
                outf.write(grass.encode('*'))
        outf.write(grass.encode("\n"))
    outf.close()
    p.wait()

    run('g.region', raster=self.inmap)
    run('r.patch', input=(self.tempmap, self.outmap), output=self.outmap,
        overwrite=True)
    run('r.colors', map=self.outmap, rast=self.inmap)
    run('g.remove', flags='f', type='raster', name=self.tempmap)
def init(raise_fatal_error=False):
    """This function sets the correct database backend from GRASS environmental
       variables and creates the GRASS temporal database structure for raster,
       vector and raster3d maps as well as for the space-time datasets strds,
       str3ds and stvds in case it does not exist.

       Several global variables are initiated and the messenger and C-library
       interface subprocesses are spawned.

       Re-run this function in case the following GRASS variables change while
       the process runs:

       - MAPSET
       - LOCATION_NAME
       - GISDBASE
       - TGIS_DISABLE_MAPSET_CHECK
       - TGIS_DISABLE_TIMESTAMP_WRITE

       Re-run this function if the following t.connect variables change while
       the process runs:

       - temporal GIS driver (set by t.connect driver=)
       - temporal GIS database (set by t.connect database=)

       The following environmental variables are checked:

       - GRASS_TGIS_PROFILE (True, False, 1, 0)
       - GRASS_TGIS_RAISE_ON_ERROR (True, False, 1, 0)

       ..warning::

           This function must be called before any spatio-temporal processing
           can be started.

       :param raise_fatal_error: Set this True to assure that the init()
                                 function does not kill a persistent process
                                 like the GUI. If set True a
                                 grass.pygrass.messages.FatalError exception
                                 will be raised in case a fatal error occurs
                                 in the init process, otherwise sys.exit(1)
                                 will be called.
    """
    # We need to set the correct database backend and several global variables
    # from the GRASS mapset specific environment variables of g.gisenv and t.connect
    global tgis_backend
    global tgis_database
    global tgis_database_string
    global tgis_dbmi_paramstyle
    global raise_on_error
    global enable_mapset_check
    global enable_timestamp_write
    global current_mapset
    global current_location
    global current_gisdbase

    raise_on_error = raise_fatal_error

    # We must run t.connect at first to create the temporal database and to
    # get the environmental variables
    gscript.run_command("t.connect", flags="c")
    grassenv = gscript.gisenv()

    # Set the global variables for faster access
    current_mapset = gscript.encode(grassenv["MAPSET"])
    current_location = gscript.encode(grassenv["LOCATION_NAME"])
    current_gisdbase = gscript.encode(grassenv["GISDBASE"])

    # Check environment variable GRASS_TGIS_RAISE_ON_ERROR
    if os.getenv("GRASS_TGIS_RAISE_ON_ERROR") == "True" or \
       os.getenv("GRASS_TGIS_RAISE_ON_ERROR") == "1":
        raise_on_error = True

    # Check if the script library raises on error,
    # if so we do the same
    if gscript.get_raise_on_error() is True:
        raise_on_error = True

    # Start the GRASS message interface server
    _init_tgis_message_interface(raise_on_error)
    # Start the C-library interface server
    _init_tgis_c_library_interface()
    msgr = get_tgis_message_interface()
    msgr.debug(1, "Initiate the temporal database")
    # "\n traceback:%s" % (str(" \n".join(traceback.format_stack())))

    ciface = get_tgis_c_library_interface()
    driver_string = ciface.get_driver_name()
    database_string = ciface.get_database_name()

    # Set the mapset check and the timestamp write
    if "TGIS_DISABLE_MAPSET_CHECK" in grassenv:
        if gscript.encode(grassenv["TGIS_DISABLE_MAPSET_CHECK"]) == "True" or \
           gscript.encode(grassenv["TGIS_DISABLE_MAPSET_CHECK"]) == "1":
            enable_mapset_check = False
            msgr.warning("TGIS_DISABLE_MAPSET_CHECK is True")

    if "TGIS_DISABLE_TIMESTAMP_WRITE" in grassenv:
        if gscript.encode(grassenv["TGIS_DISABLE_TIMESTAMP_WRITE"]) == "True" or \
           gscript.encode(grassenv["TGIS_DISABLE_TIMESTAMP_WRITE"]) == "1":
            enable_timestamp_write = False
            msgr.warning("TGIS_DISABLE_TIMESTAMP_WRITE is True")

    if driver_string is not None and driver_string != "":
        if driver_string == "sqlite":
            tgis_backend = driver_string
            try:
                import sqlite3
            except ImportError:
                msgr.error("Unable to locate the sqlite SQL Python interface"
                           " module sqlite3.")
                raise
            dbmi = sqlite3
        elif driver_string == "pg":
            tgis_backend = driver_string
            try:
                import psycopg2
            except ImportError:
                msgr.error("Unable to locate the Postgresql SQL Python "
                           "interface module psycopg2.")
                raise
            dbmi = psycopg2
        else:
            msgr.fatal(_("Unable to initialize the temporal DBMI interface. "
                         "Please use t.connect to specify the driver and the"
                         " database string"))
    else:
        # Set the default sqlite3 connection in case nothing was defined
        gscript.run_command("t.connect", flags="d")
        driver_string = ciface.get_driver_name()
        database_string = ciface.get_database_name()
        tgis_backend = driver_string
        dbmi = sqlite3

    tgis_database_string = database_string
    # Set the parameter style
    tgis_dbmi_paramstyle = dbmi.paramstyle

    # We do not know if the database already exists
    db_exists = False
    dbif = SQLDatabaseInterfaceConnection()

    # Check if the database already exists
    if tgis_backend == "sqlite":
        # Check path of the sqlite database
        if os.path.exists(tgis_database_string):
            dbif.connect()
            # Check for raster_base table
            dbif.execute("SELECT name FROM sqlite_master WHERE type='table' "
                         "AND name='raster_base';")
            name = dbif.fetchone()
            if name and name[0] == "raster_base":
                db_exists = True
            dbif.close()
    elif tgis_backend == "pg":
        # Connect to database
        dbif.connect()
        # Check for raster_base table
        dbif.execute("SELECT EXISTS(SELECT * FROM information_schema.tables "
                     "WHERE table_name=%s)", ('raster_base',))
        if dbif.fetchone()[0]:
            db_exists = True

    backup_howto = "The format of your actual temporal database is not " \
                   "supported any more.\nSolution: You need to export it by " \
                   "restoring the GRASS GIS version used for creating this DB" \
                   ". From there, create a backup of your temporal database " \
                   "to avoid the loss of your temporal data.\nNotes: Use " \
                   "t.rast.export and t.vect.export to make a backup of your" \
                   " existing space time datasets. To save the timestamps of" \
                   " your existing maps and space time datasets, use " \
                   "t.rast.list, t.vect.list and t.rast3d.list. " \
                   "You can register the existing time stamped maps easily if" \
                   " you export columns=id,start_time,end_time into text " \
                   "files and use t.register to register them again in newly" \
                   " created space time datasets (t.create). After the backup" \
                   " remove the existing temporal database, a new one will be" \
                   " created automatically.\n"

    if db_exists is True:
        # Check the version of the temporal database
        dbif.close()
        dbif.connect()
        metadata = get_tgis_metadata(dbif)
        dbif.close()
        if metadata is None:
            msgr.fatal(_("Unable to receive temporal database metadata.\n"
                         "Current temporal database info:%(info)s") % (
                {"info": get_database_info_string()}))

        for entry in metadata:
            if "tgis_version" in entry and entry[1] != str(get_tgis_version()):
                msgr.fatal(_("Unsupported temporal database: version mismatch."
                             "\n %(backup)s Supported temporal API version is:"
                             " %(api)i.\nPlease update your GRASS GIS "
                             "installation.\nCurrent temporal database info:"
                             "%(info)s") % ({"backup": backup_howto,
                                             "api": get_tgis_version(),
                                             "info": get_database_info_string()}))
            if "tgis_db_version" in entry and entry[1] != str(get_tgis_db_version()):
                msgr.fatal(_("Unsupported temporal database: version mismatch."
                             "\n %(backup)sSupported temporal database version"
                             " is: %(tdb)i\nCurrent temporal database info:"
                             "%(info)s") % ({"backup": backup_howto,
                                             "tdb": get_tgis_db_version(),
                                             "info": get_database_info_string()}))
        return

    create_temporal_database(dbif)
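# Sketch of how init() is typically used by the scripts in this collection
# (not part of the original function): import grass.temporal and call
# tgis.init() once before opening any dataset. The dataset name below is
# hypothetical.
import grass.temporal as tgis

tgis.init(raise_fatal_error=True)  # raise instead of sys.exit(1) in persistent processes
strds = tgis.open_old_stds("precipitation_daily", "strds")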
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    base = options["basename"]
    where = options["where"]
    nprocs = options["nprocs"]
    tsuffix = options["suffix"]

    mapset = grass.encode(grass.gisenv()["MAPSET"])

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds")

    maps = sp.get_registered_maps_as_objects_with_gaps(where, dbif)

    num = len(maps)

    # Configure the r.series.interp module
    gapfill_module = pymod.Module(
        "r.series.interp",
        overwrite=grass.overwrite(),
        quiet=True,
        run_=False,
        finish_=False,
    )

    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    gap_list = []
    overwrite_flags = {}

    # Identify all gaps and create new names
    count = 0
    for _map in maps:
        if _map.get_id() is None:
            count += 1
            if sp.get_temporal_type() == 'absolute' and tsuffix in ['gran', 'time']:
                _id = "{ba}@{ma}".format(ba=base, ma=mapset)
            else:
                map_name = tgis.create_numeric_suffix(base, num + count, tsuffix)
                _id = "{name}@{ma}".format(name=map_name, ma=mapset)
            _map.set_id(_id)

            gap_list.append(_map)

    if len(gap_list) == 0:
        grass.message(_("No gaps found"))
        return

    # Build the temporal topology
    tb = tgis.SpatioTemporalTopologyBuilder()
    tb.build(maps)

    # Do some checks before computation
    for _map in gap_list:
        if not _map.get_precedes() or not _map.get_follows():
            grass.fatal(_("Unable to determine successor "
                          "and predecessor of a gap."))

        if len(_map.get_precedes()) > 1:
            grass.warning(_("More than one successor of the gap found. "
                            "Using the first found."))

        if len(_map.get_follows()) > 1:
            grass.warning(_("More than one predecessor of the gap found. "
                            "Using the first found."))

    # Interpolate the maps using parallel processing
    result_list = []

    for _map in gap_list:
        predecessor = _map.get_follows()[0]
        successor = _map.get_precedes()[0]

        gran = sp.get_granularity()
        tmpval, start = predecessor.get_temporal_extent_as_tuple()
        end, tmpval = successor.get_temporal_extent_as_tuple()

        # Now resample the gap
        map_matrix = tgis.AbstractSpaceTimeDataset.resample_maplist_by_granularity(
            (_map,), start, end, gran)

        map_names = []
        map_positions = []

        increment = 1.0 / (len(map_matrix) + 1.0)
        position = increment
        count = 0
        for intp_list in map_matrix:
            new_map = intp_list[0]
            count += 1
            if sp.get_temporal_type() == 'absolute' and tsuffix == 'gran':
                suffix = tgis.create_suffix_from_datetime(
                    new_map.temporal_extent.get_start_time(),
                    sp.get_granularity())
                new_id = "{ba}_{su}@{ma}".format(ba=new_map.get_name(),
                                                 su=suffix, ma=mapset)
            elif sp.get_temporal_type() == 'absolute' and tsuffix == 'time':
                suffix = tgis.create_time_suffix(new_map)
                new_id = "{ba}_{su}@{ma}".format(ba=new_map.get_name(),
                                                 su=suffix, ma=mapset)
            else:
                map_name = tgis.create_numeric_suffix(new_map.get_name(),
                                                      count, tsuffix)
                new_id = "{name}@{ma}".format(name=map_name, ma=mapset)

            new_map.set_id(new_id)

            overwrite_flags[new_id] = False
            if new_map.map_exists() or new_map.is_in_db(dbif):
                if not grass.overwrite():
                    grass.fatal(_("Map with name <%s> already exists. "
                                  "Please use another base name." % (_id)))
                else:
                    if new_map.is_in_db(dbif):
                        overwrite_flags[new_id] = True

            map_names.append(new_map.get_name())
            map_positions.append(position)
            position += increment

            result_list.append(new_map)

        mod = copy.deepcopy(gapfill_module)
        mod(input=(predecessor.get_map_id(), successor.get_map_id()),
            datapos=(0, 1), output=map_names, samplingpos=map_positions)
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Insert new interpolated maps in temporal database and dataset
    for _map in result_list:
        id = _map.get_id()
        if overwrite_flags[id] == True:
            if _map.is_time_absolute():
                start, end = _map.get_absolute_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(id)
                _map.set_absolute_time(start, end)
            else:
                start, end, unit = _map.get_relative_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(id)
                _map.set_relative_time(start, end, unit)
        _map.load()
        _map.insert(dbif)
        sp.register_map(_map, dbif)

    sp.update_from_registered_maps(dbif)
    sp.update_command_string(dbif=dbif)
    dbif.close()
def main():
    if not hasNumPy:
        grass.fatal(_("Required dependency NumPy not found. Exiting."))

    sharpen = options["method"]    # sharpening algorithm
    ms1_orig = options["blue"]     # blue channel
    ms2_orig = options["green"]    # green channel
    ms3_orig = options["red"]      # red channel
    pan_orig = options["pan"]      # high res pan channel
    out = options["output"]        # prefix for output RGB maps
    bits = options["bitdepth"]     # bit depth of image channels
    bladjust = flags["l"]          # adjust blue channel
    sproc = flags["s"]             # serial processing
    rescale = flags["r"]           # rescale to spread pixel values to entire 0-255 range

    # Checking bit depth
    bits = float(bits)
    if bits < 2 or bits > 30:
        grass.warning(_("Bit depth is outside acceptable range"))
        return

    outb = grass.core.find_file("%s_blue" % out)
    outg = grass.core.find_file("%s_green" % out)
    outr = grass.core.find_file("%s_red" % out)

    if (outb["name"] != "" or outg["name"] != "" or outr["name"] != "") and not grass.overwrite():
        grass.warning(
            _("Maps with selected output prefix names already exist."
              " Delete them or use overwrite flag"))
        return

    pid = str(os.getpid())

    # convert input image channels to 8 bit for processing
    ms1 = "tmp%s_ms1" % pid
    ms2 = "tmp%s_ms2" % pid
    ms3 = "tmp%s_ms3" % pid
    pan = "tmp%s_pan" % pid

    if not rescale:
        if bits == 8:
            grass.message(_("Using 8bit image channels"))
            if sproc:
                # serial processing
                grass.run_command("g.copy", raster="%s,%s" % (ms1_orig, ms1),
                                  quiet=True, overwrite=True)
                grass.run_command("g.copy", raster="%s,%s" % (ms2_orig, ms2),
                                  quiet=True, overwrite=True)
                grass.run_command("g.copy", raster="%s,%s" % (ms3_orig, ms3),
                                  quiet=True, overwrite=True)
                grass.run_command("g.copy", raster="%s,%s" % (pan_orig, pan),
                                  quiet=True, overwrite=True)
            else:
                # parallel processing
                pb = grass.start_command("g.copy", raster="%s,%s" % (ms1_orig, ms1),
                                         quiet=True, overwrite=True)
                pg = grass.start_command("g.copy", raster="%s,%s" % (ms2_orig, ms2),
                                         quiet=True, overwrite=True)
                pr = grass.start_command("g.copy", raster="%s,%s" % (ms3_orig, ms3),
                                         quiet=True, overwrite=True)
                pp = grass.start_command("g.copy", raster="%s,%s" % (pan_orig, pan),
                                         quiet=True, overwrite=True)
                pb.wait()
                pg.wait()
                pr.wait()
                pp.wait()
        else:
            grass.message(_("Converting image channels to 8bit for processing"))
            maxval = pow(2, bits) - 1
            if sproc:
                # serial processing
                grass.run_command("r.rescale", input=ms1_orig, from_="0,%f" % maxval,
                                  output=ms1, to="0,255", quiet=True, overwrite=True)
                grass.run_command("r.rescale", input=ms2_orig, from_="0,%f" % maxval,
                                  output=ms2, to="0,255", quiet=True, overwrite=True)
                grass.run_command("r.rescale", input=ms3_orig, from_="0,%f" % maxval,
                                  output=ms3, to="0,255", quiet=True, overwrite=True)
                grass.run_command("r.rescale", input=pan_orig, from_="0,%f" % maxval,
                                  output=pan, to="0,255", quiet=True, overwrite=True)
            else:
                # parallel processing
                pb = grass.start_command("r.rescale", input=ms1_orig, from_="0,%f" % maxval,
                                         output=ms1, to="0,255", quiet=True, overwrite=True)
                pg = grass.start_command("r.rescale", input=ms2_orig, from_="0,%f" % maxval,
                                         output=ms2, to="0,255", quiet=True, overwrite=True)
                pr = grass.start_command("r.rescale", input=ms3_orig, from_="0,%f" % maxval,
                                         output=ms3, to="0,255", quiet=True, overwrite=True)
                pp = grass.start_command("r.rescale", input=pan_orig, from_="0,%f" % maxval,
                                         output=pan, to="0,255", quiet=True, overwrite=True)
                pb.wait()
                pg.wait()
                pr.wait()
                pp.wait()
    else:
        grass.message(_("Rescaling image channels to 8bit for processing"))

        min_ms1 = int(grass.raster_info(ms1_orig)["min"])
        max_ms1 = int(grass.raster_info(ms1_orig)["max"])
        min_ms2 = int(grass.raster_info(ms2_orig)["min"])
        max_ms2 = int(grass.raster_info(ms2_orig)["max"])
        min_ms3 = int(grass.raster_info(ms3_orig)["min"])
        max_ms3 = int(grass.raster_info(ms3_orig)["max"])
        min_pan = int(grass.raster_info(pan_orig)["min"])
        max_pan = int(grass.raster_info(pan_orig)["max"])

        maxval = pow(2, bits) - 1
        if sproc:
            # serial processing
            grass.run_command("r.rescale", input=ms1_orig, from_="%f,%f" % (min_ms1, max_ms1),
                              output=ms1, to="0,255", quiet=True, overwrite=True)
            grass.run_command("r.rescale", input=ms2_orig, from_="%f,%f" % (min_ms2, max_ms2),
                              output=ms2, to="0,255", quiet=True, overwrite=True)
            grass.run_command("r.rescale", input=ms3_orig, from_="%f,%f" % (min_ms3, max_ms3),
                              output=ms3, to="0,255", quiet=True, overwrite=True)
            grass.run_command("r.rescale", input=pan_orig, from_="%f,%f" % (min_pan, max_pan),
                              output=pan, to="0,255", quiet=True, overwrite=True)
        else:
            # parallel processing
            pb = grass.start_command("r.rescale", input=ms1_orig, from_="%f,%f" % (min_ms1, max_ms1),
                                     output=ms1, to="0,255", quiet=True, overwrite=True)
            pg = grass.start_command("r.rescale", input=ms2_orig, from_="%f,%f" % (min_ms2, max_ms2),
                                     output=ms2, to="0,255", quiet=True, overwrite=True)
            pr = grass.start_command("r.rescale", input=ms3_orig, from_="%f,%f" % (min_ms3, max_ms3),
                                     output=ms3, to="0,255", quiet=True, overwrite=True)
            pp = grass.start_command("r.rescale", input=pan_orig, from_="%f,%f" % (min_pan, max_pan),
                                     output=pan, to="0,255", quiet=True, overwrite=True)
            pb.wait()
            pg.wait()
            pr.wait()
            pp.wait()

    # get PAN resolution:
    kv = grass.raster_info(map=pan)
    nsres = kv["nsres"]
    ewres = kv["ewres"]
    panres = (nsres + ewres) / 2

    # clone current region
    grass.use_temp_region()
    grass.run_command("g.region", res=panres, align=pan)

    # Select sharpening method
    grass.message(_("Performing pan sharpening with hi res pan image: %f" % panres))
    if sharpen == "brovey":
        brovey(pan, ms1, ms2, ms3, out, pid, sproc)
    elif sharpen == "ihs":
        ihs(pan, ms1, ms2, ms3, out, pid, sproc)
    elif sharpen == "pca":
        pca(pan, ms1, ms2, ms3, out, pid, sproc)
    # Could add other sharpening algorithms here, e.g. wavelet transformation

    grass.message(_("Assigning grey equalized color tables to output images..."))

    # equalized grey scales give best contrast
    grass.message(_("setting pan-sharpened channels to equalized grey scale"))
    for ch in ["red", "green", "blue"]:
        grass.run_command("r.colors", quiet=True, map="%s_%s" % (out, ch),
                          flags="e", color="grey")

    # Landsat is too blue-ish because the panchromatic band is less sensitive to
    # blue light, so the output blue channel can be modified
    if bladjust:
        grass.message(_("Adjusting blue channel color table..."))
        blue_colors = ["0 0 0 0\n5% 0 0 0\n67% 255 255 255\n100% 255 255 255"]
        # these previous colors are way too blue for landsat
        # blue_colors = ['0 0 0 0\n10% 0 0 0\n20% 200 200 200\n40% 230 230 230\n67% 255 255 255\n100% 255 255 255']
        bc = grass.feed_command("r.colors", quiet=True, map="%s_blue" % out, rules="-")
        bc.stdin.write(grass.encode("\n".join(blue_colors)))
        bc.stdin.close()

    # output notice
    grass.verbose(_("The following pan-sharpened output maps have been generated:"))
    for ch in ["red", "green", "blue"]:
        grass.verbose(_("%s_%s") % (out, ch))

    grass.verbose(_("To visualize output, run: g.region -p raster=%s_red" % out))
    grass.verbose(_("d.rgb r=%s_red g=%s_green b=%s_blue" % (out, out, out)))
    grass.verbose(_("If desired, combine channels into a single RGB map with 'r.composite'."))
    grass.verbose(_("Channel colors can be rebalanced using i.colors.enhance."))

    # write cmd history:
    for ch in ["red", "green", "blue"]:
        grass.raster_history("%s_%s" % (out, ch))

    # create a group with the three outputs
    # grass.run_command('i.group', group=out,
    #                   input="{n}_red,{n}_blue,{n}_green".format(n=out))

    # Cleanup
    grass.message(_("cleaning up temp files"))
    try:
        grass.run_command("g.remove", flags="f", type="raster",
                          pattern="tmp%s*" % pid, quiet=True)
    except:
        pass
def main():
    # Get the options
    file = options["file"]
    input = options["input"]
    maps = options["maps"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    if maps and file:
        grass.fatal(_("%s= and %s= are mutually exclusive") % ("input", "file"))

    if not maps and not file:
        grass.fatal(_("%s= or %s= must be specified") % ("input", "file"))

    mapset = grass.encode(grass.gisenv()["MAPSET"])

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    # In case a space time dataset is specified
    if input:
        sp = tgis.open_old_stds(input, type, dbif)

    maplist = []

    dummy = tgis.RasterDataset(None)

    # Map names as comma separated string
    if maps is not None and maps != "":
        if maps.find(",") == -1:
            maplist = [maps, ]
        else:
            maplist = maps.split(",")

        # Build the maplist
        for count in range(len(maplist)):
            mapname = maplist[count]
            mapid = dummy.build_id(mapname, mapset)
            maplist[count] = mapid

    # Read the map list from file
    if file:
        fd = open(file, "r")

        line = True
        while True:
            line = fd.readline()
            if not line:
                break

            mapname = line.strip()
            mapid = dummy.build_id(mapname, mapset)
            maplist.append(mapid)

    num_maps = len(maplist)
    update_dict = {}
    count = 0

    statement = ""

    # Unregister already registered maps
    grass.message(_("Unregister maps"))
    for mapid in maplist:
        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        map = tgis.dataset_factory(type, mapid)

        # Unregister map if in database
        if map.is_in_db(dbif) == True:
            # Unregister from a single dataset
            if input:
                # Collect SQL statements
                statement += sp.unregister_map(map=map, dbif=dbif, execute=False)

            # Unregister from temporal database
            else:
                # We need to update all datasets after the removal of maps
                map.metadata.select(dbif)
                datasets = map.get_registered_stds(dbif)
                # Store all unique dataset ids in a dictionary
                if datasets:
                    for dataset in datasets:
                        update_dict[dataset] = dataset
                # Collect SQL statements
                statement += map.delete(dbif=dbif, update=False, execute=False)
        else:
            grass.warning(_("Unable to find %s map <%s> in temporal database" %
                            (map.get_type(), map.get_id())))

        count += 1

    # Execute the collected SQL statements
    if statement:
        dbif.execute_transaction(statement)

    grass.percent(num_maps, num_maps, 1)

    # Update space time datasets
    if input:
        grass.message(_("Unregister maps from space time dataset <%s>" % (input)))
    else:
        grass.message(_("Unregister maps from the temporal database"))

    if input:
        sp.update_from_registered_maps(dbif)
        sp.update_command_string(dbif=dbif)
    elif len(update_dict) > 0:
        count = 0
        for key in update_dict.keys():
            id = update_dict[key]
            sp = tgis.open_old_stds(id, type, dbif)
            sp.update_from_registered_maps(dbif)
            grass.percent(count, len(update_dict), 1)
            count += 1

    dbif.close()
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    quantile = options["quantile"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file
        filename = grass.tempfile(True)
        file = open(filename, 'w')

        for row in rows:
            string = "%s\n" % (row["id"])
            file.write(string)

        file.close()

        flag = ""
        if len(rows) > 1000:
            grass.warning(_("Processing over 1000 maps: activating -z flag of "
                            "r.series which slows down processing"))
            flag += "z"
        if nulls:
            flag += "n"

        try:
            grass.run_command("r.series", flags=flag, file=filename,
                              output=output, overwrite=grass.overwrite(),
                              method=method, quantile=quantile)
        except CalledModuleError:
            grass.fatal(_("%s failed. Check above error messages.") % 'r.series')

        if not add_time:
            # Create the time range for the output map
            if output.find("@") >= 0:
                id = output
            else:
                mapset = grass.encode(grass.gisenv()["MAPSET"])
                id = output + "@" + mapset

            map = sp.get_new_map_instance(id)
            map.load()

            # We need to set the temporal extent from the subset of selected maps
            maps = sp.get_registered_maps_as_objects(where=where, order=order, dbif=None)
            first_map = maps[0]
            last_map = maps[-1]
            start_a, end_a = first_map.get_temporal_extent_as_tuple()
            start_b, end_b = last_map.get_temporal_extent_as_tuple()

            if end_b is None:
                end_b = start_b

            if first_map.is_time_absolute():
                extent = tgis.AbsoluteTemporalExtent(start_time=start_a, end_time=end_b)
            else:
                extent = tgis.RelativeTemporalExtent(
                    start_time=start_a, end_time=end_b,
                    unit=first_map.get_relative_time_unit())
            map.set_temporal_extent(extent=extent)

            # Register the map in the temporal database
            if map.is_in_db():
                map.update_all()
            else:
                map.insert()
def save_map(self): p = grass.feed_command("r.in.ascii", input="-", output=self.tempmap, quiet=True, overwrite=True) outf = p.stdin outf.write(grass.encode("north: %f\n" % self.wind["n"])) outf.write(grass.encode("south: %f\n" % self.wind["s"])) outf.write(grass.encode("east: %f\n" % self.wind["e"])) outf.write(grass.encode("west: %f\n" % self.wind["w"])) outf.write(grass.encode("rows: %d\n" % self.wind["rows"])) outf.write(grass.encode("cols: %d\n" % self.wind["cols"])) outf.write(grass.encode("null: *\n")) for row in range(self.wind["rows"]): for col in range(self.wind["cols"]): if col > 0: outf.write(grass.encode(" ")) val = self.values[row][col] if val and self.changed[row][col]: outf.write(grass.encode("%s" % val)) else: outf.write(grass.encode("*")) outf.write(grass.encode("\n")) outf.close() p.wait() run("g.region", raster=self.inmap) run( "r.patch", input=(self.tempmap, self.outmap), output=self.outmap, overwrite=True, ) run("r.colors", map=self.outmap, rast=self.inmap) run("g.remove", flags="f", type="raster", name=self.tempmap)
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    inputs = options["inputs"]
    output = options["output"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    # Get the current mapset to create the id of the space time dataset
    mapset = grass.encode(grass.gisenv()["MAPSET"])

    inputs_split = inputs.split(",")
    input_ids = []

    for input in inputs_split:
        if input.find("@") >= 0:
            input_ids.append(input)
        else:
            input_ids.append(input + "@" + mapset)

    # Set the output name correct
    if output.find("@") >= 0:
        out_mapset = output.split("@")[1]
        if out_mapset != mapset:
            grass.fatal(_("Output space time dataset <%s> must be located in this mapset") % (output))
    else:
        output_id = output + "@" + mapset

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds_list = []
    first = None

    for id in input_ids:
        stds = tgis.open_old_stds(id, type, dbif)
        if first is None:
            first = stds

        if first.get_temporal_type() != stds.get_temporal_type():
            dbif.close()
            grass.fatal(_("Space time datasets to merge must have the same temporal type"))

        stds_list.append(stds)

    # Do nothing if nothing to merge
    if first is None:
        dbif.close()
        return

    # Check if the new id is in the database
    output_stds = tgis.dataset_factory(type, output_id)
    output_exists = output_stds.is_in_db(dbif=dbif)

    if output_exists == True and grass.overwrite() == False:
        dbif.close()
        grass.fatal(_("Unable to merge maps into space time %s dataset <%s> "
                      "please use the overwrite flag.") %
                    (stds.get_new_map_instance(None).get_type(), output_id))

    if not output_exists:
        output_stds = tgis.open_new_stds(output, type,
                                         first.get_temporal_type(),
                                         "Merged space time dataset",
                                         "Merged space time dataset",
                                         "mean", dbif=dbif, overwrite=False)
    else:
        output_stds.select(dbif=dbif)

    registered_output_maps = {}
    # Maps that are already registered in an existing dataset
    # are not registered again
    if output_exists == True:
        rows = output_stds.get_registered_maps(columns="id", dbif=dbif)
        if rows:
            for row in rows:
                registered_output_maps[row["id"]] = row["id"]

    for stds in stds_list:
        # Avoid merging of already registered maps
        if stds.get_id() != output_stds.get_id():
            maps = stds.get_registered_maps_as_objects(dbif=dbif)

            if maps:
                for map in maps:
                    # Jump over already registered maps
                    if map.get_id() in registered_output_maps:
                        continue

                    map.select(dbif=dbif)
                    output_stds.register_map(map=map, dbif=dbif)
                    # Update the registered map list
                    registered_output_maps[map.get_id()] = map.get_id()

    output_stds.update_from_registered_maps(dbif=dbif)

    if output_exists == True:
        output_stds.update_command_string(dbif=dbif)
def _download(self):
    """!Downloads data from WMS server using own driver

    @return temp_map with downloaded data
    """
    grass.message(_("Downloading data from WMS server..."))
    server_url = self.params["url"]

    if "?" in self.params["url"]:
        self.params["url"] += "&"
    else:
        self.params["url"] += "?"

    if not self.params['capfile']:
        self.cap_file = self._fetchCapabilities(self.params)
    else:
        self.cap_file = self.params['capfile']

    # initialize correct manager according to chosen OGC service
    if self.params['driver'] == 'WMTS_GRASS':
        req_mgr = WMTSRequestMgr(self.params, self.bbox, self.region,
                                 self.proj_srs, self.cap_file)
    elif self.params['driver'] == 'WMS_GRASS':
        req_mgr = WMSRequestMgr(self.params, self.bbox, self.region,
                                self.tile_size, self.proj_srs)
    elif self.params['driver'] == 'OnEarth_GRASS':
        req_mgr = OnEarthRequestMgr(self.params, self.bbox, self.region,
                                    self.proj_srs, self.cap_file)

    # get information about size in pixels and bounding box of raster, where
    # all tiles will be joined
    map_region = req_mgr.GetMapRegion()

    init = True
    temp_map = None

    fetch_try = 0

    # iterate through all tiles and download them
    while True:

        if fetch_try == 0:
            # get url for requesting the tile and information for placing the
            # tile into the raster with other tiles
            tile = req_mgr.GetNextTile()

        # if last tile has been already downloaded
        if not tile:
            break

        # url for requesting the tile
        query_url = tile[0]

        # the tile size and offset in pixels for placing it into the raster
        # where tiles are joined
        tile_ref = tile[1]
        grass.debug(query_url, 2)
        try:
            wms_data = self._fetchDataFromServer(query_url,
                                                 self.params['username'],
                                                 self.params['password'])
        except (IOError, HTTPException) as e:
            if isinstance(e, HTTPError) and e.code == 401:
                grass.fatal(
                    _("Authorization failed to '%s' when fetching data.\n%s") %
                    (self.params['url'], str(e)))
            else:
                grass.fatal(
                    _("Unable to fetch data from: '%s'\n%s") %
                    (self.params['url'], str(e)))

        temp_tile = self._tempfile()

        # download data into temporary file
        try:
            temp_tile_opened = open(temp_tile, 'wb')
            temp_tile_opened.write(wms_data.read())
        except IOError as e:
            # some servers are not happy with many subsequent requests for tiles
            # done immediately; if the immediate request was unsuccessful, try to
            # repeat the request after 5s and 30s breaks
            # TODO probably servers can return more kinds of errors related to
            # this problem (not only 104)
            if isinstance(e, socket.error) and e[0] == 104 and fetch_try < 2:
                fetch_try += 1

                if fetch_try == 1:
                    sleep_time = 5
                elif fetch_try == 2:
                    sleep_time = 30

                grass.warning(
                    _("Server refused to send data for a tile.\n"
                      "Request will be repeated after %d s.") % sleep_time)

                sleep(sleep_time)
                continue
            else:
                grass.fatal(_("Unable to write data into tempfile.\n%s") % str(e))
        finally:
            temp_tile_opened.close()

        fetch_try = 0

        tile_dataset_info = gdal.Open(temp_tile, gdal.GA_ReadOnly)
        if tile_dataset_info is None:
            # print error xml returned from server
            try:
                error_xml_opened = open(temp_tile, 'rb')
                err_str = error_xml_opened.read()
            except IOError as e:
                grass.fatal(_("Unable to read data from tempfile.\n%s") % str(e))
            finally:
                error_xml_opened.close()

            if err_str is not None:
                grass.fatal(_("WMS server error: %s") % err_str)
            else:
                grass.fatal(_("WMS server unknown error"))

        temp_tile_pct2rgb = None
        if tile_dataset_info.RasterCount < 1:
            grass.fatal(
                _("WMS server error: no band(s) received. Is server URL correct? <%s>") %
                server_url)
        if tile_dataset_info.RasterCount == 1 and \
           tile_dataset_info.GetRasterBand(1).GetRasterColorTable() is not None:
            # expansion of color table into bands
            temp_tile_pct2rgb = self._tempfile()
            tile_dataset = self._pct2rgb(temp_tile, temp_tile_pct2rgb)
        else:
            tile_dataset = tile_dataset_info

        # initialization of temp_map_dataset, where all tiles are merged
        if init:
            temp_map = self._tempfile()

            driver = gdal.GetDriverByName(self.gdal_drv_format)
            metadata = driver.GetMetadata()
            if gdal.DCAP_CREATE not in metadata or \
               metadata[gdal.DCAP_CREATE] == 'NO':
                grass.fatal(_('Driver %s does not support Create() method') %
                            self.gdal_drv_format)
            self.temp_map_bands_num = tile_dataset.RasterCount
            temp_map_dataset = driver.Create(
                temp_map, map_region['cols'], map_region['rows'],
                self.temp_map_bands_num,
                tile_dataset.GetRasterBand(1).DataType)
            init = False

        # tile is written into temp_map
        tile_to_temp_map = tile_dataset.ReadRaster(
            0, 0, tile_ref['sizeX'], tile_ref['sizeY'],
            tile_ref['sizeX'], tile_ref['sizeY'])

        temp_map_dataset.WriteRaster(
            tile_ref['t_cols_offset'], tile_ref['t_rows_offset'],
            tile_ref['sizeX'], tile_ref['sizeY'], tile_to_temp_map)

        tile_dataset = None
        tile_dataset_info = None
        grass.try_remove(temp_tile)
        grass.try_remove(temp_tile_pct2rgb)

    if not temp_map:
        return temp_map

    # georeferencing and setting projection of temp_map
    projection = grass.read_command('g.proj', flags='wf', epsg=self.params['srs'])
    projection = projection.rstrip('\n')
    temp_map_dataset.SetProjection(grass.encode(projection))

    pixel_x_length = (map_region['maxx'] - map_region['minx']) / int(map_region['cols'])
    pixel_y_length = (map_region['miny'] - map_region['maxy']) / int(map_region['rows'])

    geo_transform = [
        map_region['minx'], pixel_x_length, 0.0,
        map_region['maxy'], 0.0, pixel_y_length]
    temp_map_dataset.SetGeoTransform(geo_transform)
    temp_map_dataset = None

    return temp_map
def main():
    # lazy imports
    import grass.temporal as tgis

    name = options["input"]
    type_ = options["type"]
    shellstyle = flags['g']
    system = flags['d']
    history = flags['h']

    # Make sure the temporal database exists
    tgis.init()

    dbif, connected = tgis.init_dbif(None)

    rows = tgis.get_tgis_metadata(dbif)

    if system and not shellstyle and not history:
        #      0123456789012345678901234567890
        print(" +------------------- Temporal DBMI backend information ----------------------+")
        print(" | DBMI Python interface:...... " + str(dbif.get_dbmi().__name__))
        print(" | Temporal database string:... " + str(tgis.get_tgis_database_string()))
        print(" | SQL template path:.......... " + str(tgis.get_sql_template_path()))
        if rows:
            for row in rows:
                print(" | %s .......... %s" % (row[0], row[1]))
        print(" +----------------------------------------------------------------------------+")
        return
    elif system and not history:
        print("dbmi_python_interface='" + str(dbif.get_dbmi().__name__) + "'")
        print("dbmi_string='" + str(tgis.get_tgis_database_string()) + "'")
        print("sql_template_path='" + str(tgis.get_sql_template_path()) + "'")
        if rows:
            for row in rows:
                print("%s='%s'" % (row[0], row[1]))
        return

    if not system and not name:
        grass.fatal(_("Please specify %s=") % ("name"))

    if name.find("@") >= 0:
        id_ = name
    else:
        id_ = name + "@" + grass.encode(grass.gisenv()["MAPSET"])

    dataset = tgis.dataset_factory(type_, id_)

    if dataset.is_in_db(dbif) == False:
        grass.fatal(_("Dataset <%s> not found in temporal database") % (id_))

    dataset.select(dbif)

    if history == True and type_ in ["strds", "stvds", "str3ds"]:
        dataset.print_history()
        return

    if shellstyle == True:
        dataset.print_shell_info()
    else:
        dataset.print_info()
def txt2numpy(
    tablestring,
    sep=",",
    names=None,
    null_value=None,
    fill_value=None,
    comments="#",
    usecols=None,
    encoding=None,
    structured=True,
):
    """
    Can be removed when the function is included in grass core.
    Read table-like output from GRASS modules as a NumPy array;
    format instructions are handed down to NumPy's genfromtxt function.

    :param tablestring: tabular stdout from GRASS GIS module call
    :type tablestring: str|byte
    :param sep: Separator delimiting columns
    :type sep: str
    :param names: List of strings with names for columns
    :type names: list
    :param null_value: Characters representing the no-data value
    :type null_value: str
    :param fill_value: Value to fill no-data with
    :type fill_value: str
    :param comments: Character that identifies comments in the input string
    :type comments: str
    :param usecols: List of columns to import
    :type usecols: list
    :param structured: return structured array if True, un-structured otherwise
    :type structured: bool

    :return: numpy.ndarray
    """
    from io import BytesIO

    if not encoding:
        encoding = grassutils._get_encoding()

    if type(tablestring).__name__ == "str":
        tablestring = grass.encode(tablestring, encoding=encoding)
    elif type(tablestring).__name__ != "bytes":
        grass.fatal("Unsupported data type")

    kwargs = {
        "missing_values": null_value,
        "filling_values": fill_value,
        "usecols": usecols,
        "names": names,
        "delimiter": sep,
        "comments": comments,
    }

    if np.version.version >= "1.14":
        kwargs["encoding"] = encoding

    if structured:
        kwargs["dtype"] = None

    np_array = np.genfromtxt(BytesIO(tablestring), **kwargs)
    return np_array
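# Usage sketch for txt2numpy() above (an assumed workflow, not from the
# original snippet): feed comma-separated r.stats output into a structured
# NumPy array. "elevation" is a hypothetical raster map name and `grass` is
# assumed to be grass.script.
import grass.script as grass

out = grass.read_command("r.stats", flags="1gn", input="elevation", separator=",")
arr = txt2numpy(out, sep=",", names=["x", "y", "value"], structured=True)
print(arr["value"].mean())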
def main(): """Do the main processing """ # Parse input options: patch_map = options['input'] patches = patch_map.split('@')[0] patches_mapset = patch_map.split('@')[1] if len( patch_map.split('@')) > 1 else None pop_proxy = options['pop_proxy'] layer = options['layer'] costs = options['costs'] cutoff = float(options['cutoff']) border_dist = int(options['border_dist']) conefor_dir = options['conefor_dir'] memory = int(options['memory']) # Parse output options: prefix = options['prefix'] edge_map = '{}_edges'.format(prefix) vertex_map = '{}_vertices'.format(prefix) shortest_paths = '{}_shortest_paths'.format(prefix) # Parse flags: p_flag = flags['p'] t_flag = flags['t'] r_flag = flags['r'] dist_flags = 'kn' if flags['k'] else 'n' lin_cat = 1 zero_dist = None folder = grass.tempdir() if not os.path.exists(folder): os.makedirs(folder) # Setup counter for progress message counter = 0 # Check if location is lat/lon (only in lat/lon geodesic distance # measuring is supported) if grass.locn_is_latlong(): grass.verbose("Location is lat/lon: Geodesic distance \ measure is used") # Check if prefix is legal GRASS name if not grass.legal_name(prefix): grass.fatal('{} is not a legal name for GRASS \ maps.'.format(prefix)) if prefix[0].isdigit(): grass.fatal('Tables names starting with a digit are not SQL \ compliant.'.format(prefix)) # Check if output maps not already exists or could be overwritten for output in [edge_map, vertex_map, shortest_paths]: if grass.db.db_table_exist(output) and not grass.overwrite(): grass.fatal('Vector map <{}> already exists'.format(output)) # Check if input has required attributes in_db_connection = grass.vector.vector_db(patch_map) if not int(layer) in in_db_connection.keys(): grass.fatal('No attribute table connected vector map {} at \ layer {}.'.format(patches, layer)) #Check if cat column exists pcols = grass.vector.vector_columns(patch_map, layer=layer) #Check if cat column exists if not 'cat' in pcols.keys(): grass.fatal('Cannot find the reqired column cat in vector map \ {}.'.format(patches)) #Check if pop_proxy column exists if not pop_proxy in pcols.keys(): grass.fatal('Cannot find column {} in vector map \ {}'.format(pop_proxy, patches)) #Check if pop_proxy column is numeric type if not pcols[pop_proxy]['type'] in ['INTEGER', 'REAL', 'DOUBLE PRECISION']: grass.fatal('Column {} is of type {}. Only numeric types \ (integer or double precision) \ allowed!'.format(pop_proxy, pcols[pop_proxy]['type'])) #Check if pop_proxy column does not contain values <= 0 pop_vals = np.fromstring(grass.read_command('v.db.select', flags='c', map=patches, columns=pop_proxy, nv=-9999).rstrip('\n'), dtype=float, sep='\n') if np.min(pop_vals) <= 0: grass.fatal('Column {} contains values <= 0 or NULL. Neither \ values <= 0 nor NULL allowed!}'.format(pop_proxy)) ############################################## # Use pygrass region instead of grass.parse_command !?! start_reg = grass.parse_command('g.region', flags='ugp') max_n = start_reg['n'] min_s = start_reg['s'] max_e = start_reg['e'] min_w = start_reg['w'] # cost_nsres = reg['nsres'] # cost_ewres = reg['ewres'] # Rasterize patches # http://www.gdal.org/gdal_tutorial.html # http://geoinformaticstutorial.blogspot.no/2012/11/convert- # shapefile-to-raster-with-gdal.html if t_flag: # Rasterize patches with "all-touched" mode using GDAL # Read region-settings (not needed canuse max_n, min_s, max_e, # min_w nsres, ewres... 
prast = os.path.join(folder, 'patches_rast.tif') # Check if GDAL-GRASS plugin is installed if ogr.GetDriverByName('GRASS'): #With GDAL-GRASS plugin #Locate file for patch vector map pfile = grass.parse_command('g.findfile', element='vector', file=patches, mapset=patches_mapset)['file'] pfile = os.path.join(pfile, 'head') else: # Without GDAL-GRASS-plugin grass.warning("Cannot find GDAL-GRASS plugin. Consider \ installing it in order to save time for \ all-touched rasterisation") pfile = os.path.join(folder, 'patches_vect.gpkg') # Export patch vector map to temp-file in a GDAL-readable # format (shp) grass.run_command('v.out.ogr', flags='m', quiet=True, input=patch_map, type='area', layer=layer, output=pfile, lco='GEOMETRY_NAME=geom') # Rasterize vector map with all-touched option os.system('gdal_rasterize -l {} -at -tr {} {} \ -te {} {} {} {} -ot Uint32 -a cat \ {} {} -q'.format(patches, start_reg['ewres'], start_reg['nsres'], start_reg['w'], start_reg['s'], start_reg['e'], start_reg['n'], pfile, prast)) if not ogr.GetDriverByName('GRASS'): # Remove vector temp-file os.remove(os.path.join(folder, 'patches_vect.gpkg')) # Import rasterized patches grass.run_command('r.external', flags='o', quiet=True, input=prast, output='{}_patches_pol'.format(TMP_PREFIX)) else: # Simple rasterisation (only area) # in G 7.6 also with support for 'centroid' if float(grass.version()['version'][:3]) >= 7.6: conv_types = ['area', 'centroid'] else: conv_types = ['area'] grass.run_command('v.to.rast', quiet=True, input=patches, use='cat', type=conv_types, output='{}_patches_pol'.format(TMP_PREFIX)) # Extract boundaries from patch raster map grass.run_command('r.mapcalc', expression='{p}_patches_boundary=if(\ {p}_patches_pol,\ if((\ (isnull({p}_patches_pol[-1,0])||| \ {p}_patches_pol[-1,0]!={p}_patches_pol)||| \ (isnull({p}_patches_pol[0,1])||| \ {p}_patches_pol[0,1]!={p}_patches_pol)||| \ (isnull({p}_patches_pol[1,0])||| \ {p}_patches_pol[1,0]!={p}_patches_pol)||| \ (isnull({p}_patches_pol[0,-1])||| \ {p}_patches_pol[0,-1]!={p}_patches_pol)), \ {p}_patches_pol,null()), null())'.format(p=TMP_PREFIX), quiet=True) rasterized_cats = grass.read_command( 'r.category', separator='newline', map='{p}_patches_boundary'.format(p=TMP_PREFIX)).replace( '\t', '').strip('\n') rasterized_cats = list( map(int, set([x for x in rasterized_cats.split('\n') if x != '']))) #Init output vector maps if they are requested by user network = VectorTopo(edge_map) network_columns = [(u'cat', 'INTEGER PRIMARY KEY'), (u'from_p', 'INTEGER'), (u'to_p', 'INTEGER'), (u'min_dist', 'DOUBLE PRECISION'), (u'dist', 'DOUBLE PRECISION'), (u'max_dist', 'DOUBLE PRECISION')] network.open('w', tab_name=edge_map, tab_cols=network_columns) vertex = VectorTopo(vertex_map) vertex_columns = [ (u'cat', 'INTEGER PRIMARY KEY'), (pop_proxy, 'DOUBLE PRECISION'), ] vertex.open('w', tab_name=vertex_map, tab_cols=vertex_columns) if p_flag: # Init cost paths file for start-patch grass.run_command('v.edit', quiet=True, map=shortest_paths, tool='create') grass.run_command('v.db.addtable', quiet=True, map=shortest_paths, columns="cat integer,\ from_p integer,\ to_p integer,\ dist_min double precision,\ dist double precision,\ dist_max double precision") start_region_bbox = Bbox(north=float(max_n), south=float(min_s), east=float(max_e), west=float(min_w)) vpatches = VectorTopo(patches, mapset=patches_mapset) vpatches.open('r', layer=int(layer)) ###Loop through patches vpatch_ids = np.array(vpatches.features_to_wkb_list( feature_type="centroid", bbox=start_region_bbox), 
                             dtype=[('vid', 'uint32'), ('cat', 'uint32'),
                                    ('geom', '|S10')])
    cats = set(vpatch_ids['cat'])
    n_cats = len(cats)
    if n_cats < len(vpatch_ids['cat']):
        grass.verbose('At least one MultiPolygon found in patch map. '
                      'Using average coordinates of the centroids for '
                      'visual representation of the patch.')

    for cat in cats:
        if cat not in rasterized_cats:
            grass.warning('Patch {} has not been rasterized and will '
                          'therefore not be treated as part of the '
                          'network. Consider using t-flag or change '
                          'resolution.'.format(cat))
            continue
        grass.verbose('Calculating connectivity-distances for patch '
                      'number {}'.format(cat))

        # Filter
        from_vpatch = vpatch_ids[vpatch_ids['cat'] == cat]

        # Get patch ID and centroid
        if from_vpatch['vid'].size == 1:
            from_centroid = Centroid(v_id=int(from_vpatch['vid']),
                                     c_mapinfo=vpatches.c_mapinfo)
            from_x = from_centroid.x
            from_y = from_centroid.y
            if not from_centroid:
                continue
        else:
            # MultiPolygon: average the coordinates of all centroids
            xcoords = []
            ycoords = []
            for f_p in from_vpatch['vid']:
                from_centroid = Centroid(v_id=int(f_p),
                                         c_mapinfo=vpatches.c_mapinfo)
                xcoords.append(from_centroid.x)
                ycoords.append(from_centroid.y)
                if not from_centroid:
                    continue
            from_x = np.average(xcoords)
            from_y = np.average(ycoords)

        # Get BoundingBox
        from_bbox = grass.parse_command('v.db.select', map=patch_map,
                                        flags='r',
                                        where='cat={}'.format(cat))

        attr_filter = vpatches.table.filters.select(pop_proxy)
        attr_filter = attr_filter.where('cat={}'.format(cat))
        proxy_val = vpatches.table.execute().fetchone()

        # Prepare start patch
        start_patch = '{}_patch_{}'.format(TMP_PREFIX, cat)
        reclass_rule = grass.encode('{} = 1\n* = NULL'.format(cat))
        recl = grass.feed_command('r.reclass', quiet=True,
                                  input='{}_patches_boundary'.format(
                                      TMP_PREFIX),
                                  output=start_patch, rules='-')
        recl.stdin.write(reclass_rule)
        recl.stdin.close()
        recl.wait()

        # Disabled check whether the patch was rasterised at all (patches
        # smaller than the raster resolution and close to larger patches
        # may not be rasterised):
        # start_check = grass.parse_command('r.info', flags='r',
        #                                   map=start_patch)
        # start_check = grass.parse_command('r.univar', flags='g',
        #                                   map=start_patch)
        # print(start_check)
        # if start_check['min'] != '1':
        #     grass.warning('Patch {} has not been rasterized and will '
        #                   'therefore not be treated as part of the '
        #                   'network. Consider using t-flag or change '
        #                   'resolution.'.format(cat))
        #     grass.run_command('g.remove', flags='f', vector=start_patch,
        #                       raster=start_patch, quiet=True)
        #     grass.del_temp_region()
        #     continue

        # Prepare stop patches
        ############################################
        reg = grass.parse_command('g.region', flags='ug', quiet=True,
                                  raster=start_patch,
                                  n=float(from_bbox['n']) + float(cutoff),
                                  s=float(from_bbox['s']) - float(cutoff),
                                  e=float(from_bbox['e']) + float(cutoff),
                                  w=float(from_bbox['w']) - float(cutoff),
                                  align='{}_patches_pol'.format(TMP_PREFIX))

        # Clip the cutoff-expanded region to the overall patch region extent
        north = reg['n'] if max_n > reg['n'] else max_n
        south = reg['s'] if min_s < reg['s'] else min_s
        east = reg['e'] if max_e > reg['e'] else max_e
        west = reg['w'] if min_w < reg['w'] else min_w

        # Set region to patch search radius
        grass.use_temp_region()
        grass.run_command('g.region', quiet=True,
                          n=north, s=south, e=east, w=west,
                          align='{}_patches_pol'.format(TMP_PREFIX))

        # Create buffer around start-patch as a mask
        # for cost distance analysis
        grass.run_command('r.buffer', quiet=True, input=start_patch,
                          output='MASK', distances=cutoff)
        grass.run_command('r.mapcalc', quiet=True,
                          expression='{pf}_patch_{p}_neighbours_contur='
                                     'if({pf}_patches_boundary=={p},'
                                     'null(),{pf}_patches_boundary)'.format(
                                         pf=TMP_PREFIX, p=cat))
        grass.run_command('r.mask', flags='r', quiet=True)

        # Calculate cost distance
        cost_distance_map = '{}_patch_{}_cost_dist'.format(prefix, cat)
        grass.run_command('r.cost', flags=dist_flags, quiet=True,
                          overwrite=True, input=costs,
                          output=cost_distance_map,
                          start_rast=start_patch, memory=memory)

        # grass.run_command('g.region', flags='up')
        # grass.raster.raster_history(cost_distance_map)
        cdhist = History(cost_distance_map)
        cdhist.clear()
        cdhist.creator = os.environ['USER']
        cdhist.write()
        # History object cannot modify description
        grass.run_command('r.support', map=cost_distance_map,
                          description='Generated by r.connectivity.distance',
                          history=os.environ['CMDLINE'])

        # Export distance at boundaries
        maps = '{0}_patch_{1}_neighbours_contur,{2}_patch_{1}_cost_dist'
        maps = maps.format(TMP_PREFIX, cat, prefix)

        connections = grass.encode(grass.read_command('r.stats',
                                                      flags='1ng',
                                                      quiet=True,
                                                      input=maps,
                                                      separator=';'
                                                      ).rstrip('\n'))
        if connections:
            con_array = np.genfromtxt(BytesIO(connections), delimiter=';',
                                      dtype=None,
                                      names=['x', 'y', 'cat', 'dist'])
        else:
            grass.warning('No connections for patch {}'.format(cat))

            # Write centroid to vertex map
            vertex.write(Point(from_x, from_y), cat=int(cat),
                         attrs=proxy_val)
            vertex.table.conn.commit()

            # Remove temporary map data
            grass.run_command('g.remove', quiet=True, flags='f',
                              type=['raster', 'vector'],
                              pattern='{}*{}*'.format(TMP_PREFIX, cat))
            grass.del_temp_region()
            continue

        # Find closest points on neighbour patches
        to_cats = set(np.atleast_1d(con_array['cat']))
        to_coords = []
        for to_cat in to_cats:
            connection = con_array[con_array['cat'] == to_cat]
            connection.sort(order=['dist'])
            pixel = (border_dist if len(connection) > border_dist
                     else len(connection) - 1)
            # closest_points_x = connection['x'][pixel]
            # closest_points_y = connection['y'][pixel]
            closest_points_to_cat = to_cat
            closest_points_min_dist = connection['dist'][0]
            closest_points_dist = connection['dist'][pixel]
            closest_points_max_dist = connection['dist'][-1]
            to_patch_ids = vpatch_ids[vpatch_ids['cat'] == int(to_cat)]['vid']

            if len(to_patch_ids) == 1:
                to_centroid = Centroid(v_id=to_patch_ids,
                                       c_mapinfo=vpatches.c_mapinfo)
                to_x = to_centroid.x
                to_y = to_centroid.y
            elif len(to_patch_ids) >= 1:
                # MultiPolygon: average the coordinates of all centroids
                xcoords = []
                ycoords = []
                for t_p in to_patch_ids:
                    to_centroid = Centroid(v_id=int(t_p),
                                           c_mapinfo=vpatches.c_mapinfo)
                    xcoords.append(to_centroid.x)
                    ycoords.append(to_centroid.y)
                    if not to_centroid:
                        continue
                to_x = np.average(xcoords)
                to_y = np.average(ycoords)

            to_coords.append('{},{},{},{},{},{}'.format(
                connection['x'][0], connection['y'][0], to_cat,
                closest_points_min_dist, closest_points_dist,
                closest_points_max_dist))

            # Save edges to network dataset
            if closest_points_dist <= 0:
                zero_dist = 1

            # Write data to network
            network.write(Line([(from_x, from_y), (to_x, to_y)]),
                          cat=lin_cat,
                          attrs=(cat,
                                 int(closest_points_to_cat),
                                 closest_points_min_dist,
                                 closest_points_dist,
                                 closest_points_max_dist,))
            network.table.conn.commit()

            lin_cat = lin_cat + 1

        # Save closest points and shortest paths through the cost raster
        # as a vector map (r.drain is limited to 1024 points) if requested
        if p_flag:
            grass.verbose('Extracting shortest paths for patch number '
                          '{}...'.format(cat))

            points_n = len(to_cats)
            tiles = int(points_n / 1024.0)
            rest = points_n % 1024
            if not rest == 0:
                tiles = tiles + 1

            tile_n = 0
            while tile_n < tiles:
                tile_n = tile_n + 1
                # Import closest points for start-patch in blocks of
                # up to 1024 points
                sp = grass.feed_command('v.in.ascii', flags='nr',
                                        overwrite=True, quiet=True,
                                        input='-', stderr=subprocess.PIPE,
                                        output='{}_{}_cp'.format(TMP_PREFIX,
                                                                 cat),
                                        separator=',',
                                        columns='x double precision, '
                                                'y double precision, '
                                                'to_p integer, '
                                                'dist_min double precision, '
                                                'dist double precision, '
                                                'dist_max double precision')
                sp.stdin.write(grass.encode('\n'.join(to_coords)))
                sp.stdin.close()
                sp.wait()

                # Extract shortest paths for start-patch in chunks of
                # 1024 points
                cost_paths = '{}_{}_cost_paths'.format(TMP_PREFIX, cat)
                start_points = '{}_{}_cp'.format(TMP_PREFIX, cat)
                grass.run_command('r.drain', overwrite=True, quiet=True,
                                  input=cost_distance_map,
                                  output=cost_paths,
                                  drain=cost_paths,
                                  start_points=start_points)

                grass.run_command('v.db.addtable', map=cost_paths,
                                  quiet=True,
                                  columns='cat integer, '
                                          'from_p integer, '
                                          'to_p integer, '
                                          'dist_min double precision, '
                                          'dist double precision, '
                                          'dist_max double precision')
                grass.run_command('v.db.update', map=cost_paths,
                                  column='from_p', value=cat, quiet=True)
                grass.run_command('v.distance', quiet=True,
                                  from_=cost_paths, to=start_points,
                                  upload='to_attr', column='to_p',
                                  to_column='to_p')
                grass.run_command('v.db.join', quiet=True, map=cost_paths,
                                  column='to_p', other_column='to_p',
                                  other_table=start_points,
                                  subset_columns='dist_min,dist,dist_max')

                # grass.run_command('v.info', flags='c', map=cost_paths)
                grass.run_command('v.patch', flags='ae', overwrite=True,
                                  quiet=True, input=cost_paths,
                                  output=shortest_paths)

                # Remove temporary map data
                grass.run_command('g.remove', quiet=True, flags='f',
                                  type=['raster', 'vector'],
                                  pattern='{}*{}*'.format(TMP_PREFIX, cat))

        # Remove temporary map data for patch
        if r_flag:
            grass.run_command('g.remove', flags='f', type='raster',
                              name=cost_distance_map, quiet=True)

        # Write centroid to vertex map
        vertex.write(Point(from_x, from_y), cat=int(cat), attrs=proxy_val)
        vertex.table.conn.commit()

        # Print progress message
        grass.percent(i=int((float(counter) / n_cats) * 100), n=100, s=3)

        # Update counter for progress message
        counter = counter + 1

    if zero_dist:
        grass.warning('Some patches are directly adjacent to others. '
                      'Minimum distance set to 0.0000000001')

    # Close vector maps and build topology
    network.close()
    vertex.close()

    # Add vertex attributes
    # grass.run_command('v.db.addtable', map=vertex_map)
    # grass.run_command('v.db.join', map=vertex_map, column='cat',
    #                   other_table=in_db_connection[int(layer)]['table'],
    #                   other_column='cat', subset_columns=pop_proxy,
    #                   quiet=True)

    # Add history and meta data to produced maps
    grass.run_command('v.support', flags='h', map=edge_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    grass.run_command('v.support', flags='h', map=vertex_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    if p_flag:
        grass.run_command('v.support', flags='h', map=shortest_paths,
                          person=os.environ['USER'],
                          cmdhist=os.environ['CMDLINE'])

    # Write also Conefor connection and node files if requested
    if conefor_dir:
        query = """SELECT p_from, p_to, avg(dist) FROM
                   (SELECT
                        CASE WHEN from_p > to_p THEN to_p ELSE from_p
                            END AS p_from,
                        CASE WHEN from_p > to_p THEN from_p ELSE to_p
                            END AS p_to,
                        dist
                    FROM {}) AS x
                   GROUP BY p_from, p_to""".format(edge_map)
        with open(os.path.join(conefor_dir,
                               'undirected_connection_file'), 'w') as edges:
            edges.write(grass.read_command('db.select', sql=query,
                                           separator=' '))
        with open(os.path.join(conefor_dir,
                               'directed_connection_file'), 'w') as edges:
            edges.write(grass.read_command('v.db.select', map=edge_map,
                                           separator=' ', flags='c'))
        with open(os.path.join(conefor_dir, 'node_file'), 'w') as nodes:
            nodes.write(grass.read_command('v.db.select', map=vertex_map,
                                           separator=' ', flags='c'))
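# Editor's note: the connectivity script above repeatedly pipes text (reclass
# rules, closest-point lists) into GRASS modules via feed_command() plus raw
# stdin handling. Below is a minimal sketch of the same pattern using
# grass.script.write_command(), which wraps the feed/stdin/wait sequence.
# The function name and map-name arguments are hypothetical placeholders for
# illustration, not objects created by the script above.
import grass.script as grass


def reclass_single_patch(cat, boundary_map, start_patch):
    """Keep only category `cat` (as value 1) and set everything else to NULL."""
    rules = '{} = 1\n* = NULL'.format(cat)
    # write_command() feeds `rules` to r.reclass via stdin (rules='-'),
    # equivalent to feed_command() + stdin.write() + stdin.close() + wait().
    grass.write_command('r.reclass', quiet=True, input=boundary_map,
                        output=start_patch, rules='-', stdin=rules)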
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    vector_output = options["vector_output"]
    strds = options["strds"]
    where = options["where"]
    columns = options["columns"]

    if where == "" or where == " " or where == "\n":
        where = None

    overwrite = grass.overwrite()

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")
    column_names = columns.split(",")
    if len(strds_names) != len(column_names):
        grass.fatal(_("The number of columns must be equal to the "
                      "number of space time raster datasets"))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = grass.encode(grass.gisenv()["MAPSET"])

    out_sp = tgis.check_new_stds(output, "stvds", dbif, overwrite)

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)

    # Single space time raster dataset
    if len(strds_names) == 1:
        rows = first_strds.get_registered_maps(
            columns="name,mapset,start_time,end_time",
            order="start_time", dbif=dbif)

        if not rows:
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> is empty")
                        % out_sp.get_id())

        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [row["name"] + "@" + row["mapset"], ]

            s = Sample(start, end, raster_maps)
            samples.append(s)
    else:
        # Multiple space time raster datasets
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(_("Temporal type of space time raster datasets "
                              "must be equal\n<%(a)s> of type %(type_a)s "
                              "does not match <%(b)s> of type %(type_b)s" %
                              {"a": first_strds.get_id(),
                               "type_a": first_strds.get_temporal_type(),
                               "b": dataset.get_id(),
                               "type_b": dataset.get_temporal_type()}))

        mapmatrizes = tgis.sample_stds_by_stds_topology(
            "strds", "strds", strds_names, strds_names[0],
            False, None, "equal", False, False)

        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:
                entry = mapmatrix[i]

                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)

            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]

                start, end = map.get_temporal_extent_as_tuple()
                s = Sample(start, end, mapname_list)
                samples.append(s)

    num_samples = len(samples)

    # Get the layer and database connections of the input vector
    vector_db = grass.vector.vector_db(input)

    # We copy the vector table and create the new layers
    if vector_db:
        # Use the first layer to copy the categories from
        layers = "1,"
    else:
        layers = ""
    first = True
    for layer in range(num_samples):
        layer += 1
        # Skip existing layer
        if vector_db and layer in vector_db and \
           vector_db[layer]["layer"] == layer:
            continue
        if first:
            layers += "%i" % (layer)
            first = False
        else:
            layers += ",%i" % (layer)

    vectmap = vector_output

    # We create a new vector map using the categories of the original map
    try:
        grass.run_command("v.category", input=input, layer=layers,
                          output=vectmap, option="transfer",
                          overwrite=overwrite)
    except CalledModuleError:
        grass.fatal(_("Unable to create new layers for vector map <%s>")
                    % (vectmap))

    title = _("Observation of space time raster dataset(s) <%s>") % (strds)
    description = _("Observation of space time raster dataset(s) <%s>"
                    " with vector map <%s>") % (strds, input)

    # Create the output space time vector dataset
    out_sp = tgis.open_new_stds(output, "stvds",
                                first_strds.get_temporal_type(),
                                title, description,
                                first_strds.get_semantic_type(),
                                dbif, overwrite)

    dummy = out_sp.get_new_map_instance(None)

    # Sample the space time raster dataset with the vector
    # map at specific layer with v.what.rast
    count = 1
    for sample in samples:
        raster_names = sample.raster_names

        if len(raster_names) != len(column_names):
            grass.fatal(_("The number of raster maps in a granule must "
                          "be equal to the number of column names"))

        # Create the columns creation string
        columns_string = ""
        for name, column in zip(raster_names, column_names):
            # The column is by default double precision
            coltype = "DOUBLE PRECISION"
            # Get raster map type
            raster_map = tgis.RasterDataset(name)
            raster_map.load()
            if raster_map.metadata.get_datatype() == "CELL":
                coltype = "INT"

            tmp_string = "%s %s," % (column, coltype)
            columns_string += tmp_string

        # Remove last comma
        columns_string = columns_string[0:len(columns_string) - 1]

        # Try to add a column
        if vector_db and count in vector_db and vector_db[count]["table"]:
            try:
                grass.run_command("v.db.addcolumn", map=vectmap,
                                  layer=count, column=columns_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to add column %s to vector map <%s> "
                              "with layer %i")
                            % (columns_string, vectmap, count))
        else:
            # Try to add a new table
            grass.message("Add table to layer %i" % (count))
            try:
                grass.run_command("v.db.addtable", map=vectmap, layer=count,
                                  columns=columns_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to add table to vector map "
                              "<%s> with layer %i") % (vectmap, count))

        # Call v.what.rast for each raster map
        for name, column in zip(raster_names, column_names):
            try:
                grass.run_command("v.what.rast", map=vectmap,
                                  layer=count, raster=name,
                                  column=column, where=where)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to run v.what.rast for vector map "
                              "<%s> with layer %i and raster map <%s>")
                            % (vectmap, count, str(raster_names)))

        vect = out_sp.get_new_map_instance(dummy.build_id(vectmap, mapset,
                                                          str(count)))
        vect.load()

        start = sample.start
        end = sample.end

        if out_sp.is_time_absolute():
            vect.set_absolute_time(start, end)
        else:
            vect.set_relative_time(start, end,
                                   first_strds.get_relative_time_unit())

        if vect.is_in_db(dbif):
            vect.update_all(dbif)
        else:
            vect.insert(dbif)

        out_sp.register_map(vect, dbif)
        count += 1

    out_sp.update_from_registered_maps(dbif)
    dbif.close()
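# Editor's note: main() above constructs Sample objects and later reads
# sample.start, sample.end and sample.raster_names, but the class itself is
# defined elsewhere in the script. The following is a minimal sketch of a
# container providing the attributes this code relies on; it is an inferred
# assumption for illustration, not the original definition.
class Sample(object):
    """One sampled granule: temporal extent plus the raster maps in it."""

    def __init__(self, start=None, end=None, raster_names=None):
        self.start = start
        self.end = end
        self.raster_names = raster_names if raster_names else []

    def __str__(self):
        return "Start: %s\nEnd: %s\nNames: %s" % (str(self.start),
                                                  str(self.end),
                                                  str(self.raster_names))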