def setUpClass(cls):
    """Initialize TGIS and build a relative-time STRDS "A" from four
    constant raster maps (a1..a4 = 100..400, registered at 14-day steps).
    """
    os.putenv("GRASS_OVERWRITE", "1")
    tgis.init()
    cls.use_temp_region()
    cls.runModule("g.region", s=0, n=80, w=0, e=120, b=0, t=50,
                  res=10, res3=10)

    # Four constant rasters: a1 = 100, a2 = 200, a3 = 300, a4 = 400
    for idx in range(1, 5):
        cls.runModule("r.mapcalc",
                      expression="a%d = %d" % (idx, idx * 100),
                      overwrite=True)

    cls.runModule("t.create", type="strds", temporaltype="relative",
                  output="A", title="A test", description="A test",
                  overwrite=True)
    cls.runModule("t.register", type="raster", input="A",
                  maps="a1,a2,a3,a4", start="0", increment="14",
                  unit="days", overwrite=True)
def setUpClass(cls):
    """Initialize TGIS and build a relative-time STVDS "A" from four
    identical random point maps (same seed), registered at 14-day steps.
    """
    os.putenv("GRASS_OVERWRITE", "1")
    tgis.init()
    cls.use_temp_region()
    cls.runModule("g.region", s=0, n=80, w=0, e=120, b=0, t=50,
                  res=10, res3=10)

    # Same seed for all maps -> identical point sets a1..a4
    for map_name in ("a1", "a2", "a3", "a4"):
        cls.runModule("v.random", quiet=True, npoints=20, seed=1,
                      output=map_name)

    cls.runModule("t.create", type="stvds", temporaltype="relative",
                  output="A", title="A test", description="A test",
                  overwrite=True)
    cls.runModule("t.register", type="vector", input="A",
                  maps="a1,a2,a3,a4", start="0", increment="14",
                  unit="days", overwrite=True)
def test():
    """Ad-hoc smoke test for TemporalManager with relative-interval data.

    FIX: converted Python 2 ``print`` statements to the print() function;
    the old statements are syntax errors under Python 3.
    """
    from pprint import pprint

    # Make sure the temporal database exists
    tgis.init()

    temp = TemporalManager()
    # timeseries = createAbsolutePoint()
    # timeseries = createRelativePoint()
    # timeseries1, timeseries2 = createAbsoluteInterval()
    timeseries1, timeseries2 = createRelativeInterval()

    temp.AddTimeSeries(timeseries1, 'strds')
    temp.AddTimeSeries(timeseries2, 'strds')

    try:
        warn = temp.EvaluateInputData()
        print(warn)
    except GException as e:
        print(e)
        return

    print('///////////////////////////')
    gran = temp.GetGranularity()
    print("granularity: " + str(gran))
    pprint(temp.GetLabelsAndMaps())
def main():
    """Print information about a space time dataset or the TGIS backend
    (t.info): plain or shell-style backend info with -d, dataset history
    with -h, otherwise dataset info.
    """
    name = options["input"]
    type_ = options["type"]
    shellstyle = flags['g']
    system = flags['d']
    history = flags['h']

    # Make sure the temporal database exists
    tgis.init()

    dbif, connected = tgis.init_dbif(None)

    rows = tgis.get_tgis_metadata(dbif)

    if system and not shellstyle:
        #                  0123456789012345678901234567890
        print(" +------------------- Temporal DBMI backend information ----------------------+")
        print(" | DBMI Python interface:...... " + str(dbif.get_dbmi().__name__))
        print(" | Temporal database string:... " + str(
            tgis.get_tgis_database_string()))
        print(" | SQL template path:.......... " + str(
            tgis.get_sql_template_path()))
        if rows:
            for row in rows:
                print(" | %s .......... %s" % (row[0], row[1]))
        print(" +----------------------------------------------------------------------------+")
        return
    elif system:
        print("dbmi_python_interface=\'" + str(dbif.get_dbmi().__name__) + "\'")
        print("dbmi_string=\'" + str(tgis.get_tgis_database_string()) + "\'")
        print("sql_template_path=\'" + str(tgis.get_sql_template_path()) + "\'")
        if rows:
            for row in rows:
                print("%s=\'%s\'" % (row[0], row[1]))
        return

    if not system and not name:
        grass.fatal(_("Please specify %s=") % ("name"))

    if name.find("@") >= 0:
        id_ = name
    else:
        id_ = name + "@" + grass.gisenv()["MAPSET"]

    dataset = tgis.dataset_factory(type_, id_)

    if not dataset.is_in_db(dbif):
        grass.fatal(_("Dataset <%s> not found in temporal database") % (id_))

    dataset.select(dbif)

    # BUG FIX: the original tested the *builtin* ``type`` (never a member of
    # the list), so the -h history flag could never print anything.
    if history and type_ in ["strds", "stvds", "str3ds"]:
        dataset.print_history()
        return

    if shellstyle:
        dataset.print_shell_info()
    else:
        dataset.print_info()
def main():
    """Parse and run a temporal raster algebra expression (t.rast.algebra)."""
    expression = options['expression']
    basename = options['basename']
    nprocs = options["nprocs"]
    spatial = flags["s"]
    register_null = flags["n"]
    granularity = flags["g"]
    dry_run = flags["d"]

    # Check for PLY installation.
    # FIX: catch ImportError only — the original bare ``except:`` also
    # swallowed unrelated errors such as KeyboardInterrupt.
    try:
        import ply.lex as lex  # noqa: F401 (availability check only)
        import ply.yacc as yacc  # noqa: F401
    except ImportError:
        grass.script.fatal(_("Please install PLY (Lex and Yacc Python implementation) to use the temporal algebra modules. "
                             "You can use t.rast.mapcalc that provides a limited but useful alternative to "
                             "t.rast.algebra without PLY requirement."))

    tgis.init(True)
    p = tgis.TemporalRasterAlgebraParser(run=True, debug=False,
                                         spatial=spatial, nprocs=nprocs,
                                         register_null=register_null,
                                         dry_run=dry_run)

    if granularity:
        if not p.setup_common_granularity(
                expression=expression,
                lexer=tgis.TemporalRasterAlgebraLexer()):
            grass.script.fatal(_("Unable to process the expression in granularity algebra mode"))

    pc = p.parse(expression, basename, grass.script.overwrite())

    # In dry-run mode only show the process chain that would be executed
    if dry_run is True:
        import pprint
        pprint.pprint(pc)
def setUpClass(cls):
    """Initialize TGIS and build an absolute-time STRDS "A" with six
    constant float maps (100.0..600.0) plus an all-NULL map, registered
    as intervals at 14-day increments.
    """
    os.putenv("GRASS_OVERWRITE", "1")
    tgis.init()
    cls.use_temp_region()
    cls.runModule("g.region", s=0, n=80, w=0, e=120, b=0, t=50,
                  res=10, res3=10)

    # a1..a6 hold the constants 100.0 .. 600.0
    for idx in range(1, 7):
        cls.runModule("r.mapcalc",
                      expression="a%d = %d.0" % (idx, idx * 100),
                      overwrite=True)
    # a7 is completely NULL
    cls.runModule("r.mapcalc", expression="a7 = null()", overwrite=True)

    cls.runModule("t.create", type="strds", temporaltype="absolute",
                  output="A", title="A test", description="A test",
                  overwrite=True)
    cls.runModule("t.register", flags="i", type="raster", input="A",
                  maps="a1,a2,a3,a4,a5,a6,a7",
                  start="2001-01-15 12:05:45", increment="14 days",
                  overwrite=True)
def main():
    """Print attribute data of all maps registered in a space time vector
    dataset (t.vect.db.select), prefixing each row with start/end time.

    FIX: converted Python 2 ``print`` statements to print() calls (syntax
    errors under Python 3) and renamed a local that shadowed ``list``.
    """
    # Get the options
    input = options["input"]
    where = options["where"]
    columns = options["columns"]
    tempwhere = options["t_where"]
    layer = options["layer"]
    separator = grass.separator(options["separator"])

    # Normalize empty / whitespace-only filters to "no filter"
    if where == "" or where == " " or where == "\n":
        where = None
    if columns == "" or columns == " " or columns == "\n":
        columns = None

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "stvds")

    rows = sp.get_registered_maps("name,layer,mapset,start_time,end_time",
                                  tempwhere, "start_time", None)

    col_names = ""
    if rows:
        for row in rows:
            vector_name = "%s@%s" % (row["name"], row["mapset"])
            # In case a layer is defined in the vector dataset,
            # we override the option layer
            if row["layer"]:
                layer = row["layer"]

            select = grass.read_command("v.db.select", map=vector_name,
                                        layer=layer, columns=columns,
                                        separator="%s" % (separator),
                                        where=where)

            if not select:
                grass.fatal(_("Unable to run v.db.select for vector map <%s> "
                              "with layer %s") % (vector_name, layer))
            # The first line are the column names
            lines = select.split("\n")
            count = 0
            for entry in lines:
                if entry.strip() != "":
                    # print the column names in case they change
                    if count == 0:
                        col_names_new = "start_time%send_time%s%s" % (
                            separator, separator, entry)
                        if col_names != col_names_new:
                            col_names = col_names_new
                            print(col_names)
                    else:
                        if row["end_time"]:
                            print("%s%s%s%s%s" % (row["start_time"], separator,
                                                  row["end_time"], separator,
                                                  entry))
                        else:
                            print("%s%s%s%s" % (row["start_time"], separator,
                                                separator, entry))
                    count += 1
def main():
    """Rename a space time dataset within the current mapset (t.rename).

    FIX: removed ``== True`` / ``== False`` comparisons, renamed the local
    that shadowed the builtin ``type``, and close the DB connection on the
    success path as well.
    """
    # Get the options
    input = options["input"]
    output = options["output"]
    stds_type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    # Get the current mapset to create the id of the space time dataset
    mapset = grass.gisenv()["MAPSET"]

    # Qualify both names with a mapset if not already qualified
    if input.find("@") >= 0:
        old_id = input
    else:
        old_id = input + "@" + mapset
    if output.find("@") >= 0:
        new_id = output
    else:
        new_id = output + "@" + mapset

    # Do not overwrite yourself
    if new_id == old_id:
        return

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()
    stds = tgis.dataset_factory(stds_type, old_id)

    # Renaming is only allowed within the current mapset
    if new_id.split("@")[1] != mapset:
        grass.fatal(_("Space time %s dataset <%s> can not be renamed. "
                      "Mapset of the new identifier differs from the current "
                      "mapset.") % (stds.get_new_map_instance(None).get_type(),
                                    old_id))

    if not stds.is_in_db(dbif=dbif):
        dbif.close()
        grass.fatal(_("Space time %s dataset <%s> not found") % (
            stds.get_new_map_instance(None).get_type(), old_id))

    # Check if the new id is in the database
    new_stds = tgis.dataset_factory(stds_type, new_id)

    if new_stds.is_in_db(dbif=dbif) and not grass.overwrite():
        dbif.close()
        grass.fatal(_("Unable to rename Space time %s dataset <%s>. Name <%s> "
                      "is in use, please use the overwrite flag.") % (
            stds.get_new_map_instance(None).get_type(), old_id, new_id))

    # Remove an already existing space time dataset
    if new_stds.is_in_db(dbif=dbif):
        new_stds.delete(dbif=dbif)

    stds.select(dbif=dbif)
    stds.rename(ident=new_id, dbif=dbif)
    stds.update_command_string(dbif=dbif)
    dbif.close()
def main():
    """Aggregate all maps of a STRDS into one raster with r.series and
    (unless -t is given) register the result with the dataset's extent.

    FIX: use a ``with`` block so the map-list file is closed even if a
    write fails, and renamed locals that shadowed the builtins ``file``,
    ``id`` and ``map``.
    """
    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file
        filename = grass.tempfile(True)
        with open(filename, 'w') as map_list:
            for row in rows:
                map_list.write("%s\n" % (row["id"]))

        flag = ""
        if len(rows) > 1000:
            grass.warning(_("Processing over 1000 maps: activating -z flag of r.series which slows down processing"))
            flag += "z"
        if nulls:
            flag += "n"

        try:
            grass.run_command("r.series", flags=flag, file=filename,
                              output=output, overwrite=grass.overwrite(),
                              method=method)
        except CalledModuleError:
            grass.fatal(_("%s failed. Check above error messages.") % 'r.series')

        if not add_time:
            # Create the time range for the output map
            if output.find("@") >= 0:
                out_id = output
            else:
                mapset = grass.gisenv()["MAPSET"]
                out_id = output + "@" + mapset

            out_map = sp.get_new_map_instance(out_id)
            out_map.load()
            out_map.set_temporal_extent(sp.get_temporal_extent())

            # Register the map in the temporal database
            if out_map.is_in_db():
                out_map.update_all()
            else:
                out_map.insert()
def setUpClass(cls):
    """Initialize TGIS, force use of the current mapset as temporal
    database, and set up a small temporary test region.
    """
    tgis.init()
    cls.use_temp_region()
    cls.runModule("g.gisenv", set="TGIS_USE_CURRENT_MAPSET=1")
    cls.runModule("g.region", n=80, s=0, e=120, w=0, b=0, t=50,
                  res=10, res3=10)
def test():
    """Manual smoke test for the animation BitmapProvider.

    Builds a two-layer list (a strds raster and a vector overlay), renders
    all frames into /tmp/test and saves each produced bitmap as PNG.
    NOTE(review): requires a running GRASS session and wx; not a unit test.
    """
    import shutil
    from core.layerlist import LayerList, Layer
    from animation.data import AnimLayer
    from animation.utils import layerListToCmdsMatrix
    import grass.temporal as tgis

    tgis.init()

    # First layer: a space time raster dataset rendered with d.rast
    layerList = LayerList()
    layer = AnimLayer()
    layer.mapType = 'strds'
    layer.name = 'JR'
    layer.cmd = ['d.rast', 'map=elev_2007_1m']
    layerList.AddLayer(layer)

    # Second layer: a static vector map, half transparent
    layer = Layer()
    layer.mapType = 'vect'
    layer.name = 'buildings_2009_approx'
    layer.cmd = ['d.vect', 'map=buildings_2009_approx', 'color=grey']
    layer.opacity = 50
    layerList.AddLayer(layer)

    bPool = BitmapPool()
    mapFilesPool = MapFilesPool()

    # Fresh working directory for the rendered files
    tempDir = '/tmp/test'
    if os.path.exists(tempDir):
        shutil.rmtree(tempDir)
    os.mkdir(tempDir)
    # comment this line to keep the directory after prgm ends
    # cleanUp = CleanUp(tempDir)
    # import atexit
    # atexit.register(cleanUp)

    prov = BitmapProvider(bPool, mapFilesPool, tempDir,
                          imageWidth=640, imageHeight=480)
    # Progress reporting: print rendering/composition milestones to stdout
    prov.renderingStarted.connect(
        lambda count: sys.stdout.write("Total number of maps: {c}\n".format(c=count)))
    prov.renderingContinues.connect(
        lambda current, text: sys.stdout.write("Current number: {c}\n".format(c=current)))
    prov.compositionStarted.connect(
        lambda count: sys.stdout.write("Composition: total number of maps: {c}\n".format(c=count)))
    prov.compositionContinues.connect(
        lambda current, text: sys.stdout.write("Composition: Current number: {c}\n".format(c=current)))
    prov.mapsLoaded.connect(
        lambda: sys.stdout.write("Maps loading finished\n"))
    cmdMatrix = layerListToCmdsMatrix(layerList)
    prov.SetCmds(cmdMatrix, [l.opacity for l in layerList])

    # wx.App must exist before bitmaps are created
    app = wx.App()

    prov.Load(bgcolor=(13, 156, 230), nprocs=4)

    # Dump every rendered bitmap for visual inspection
    for key in bPool.keys():
        if key is not None:
            bPool[key].SaveFile(os.path.join(tempDir, key + '.png'),
                                wx.BITMAP_TYPE_PNG)
def setUpClass(cls):
    """Prepare the temporal GIS for the tests: enable overwrite mode,
    force the current mapset as temporal database, and set the region.
    """
    os.putenv("GRASS_OVERWRITE", "1")
    # Use always the current mapset as temporal database
    cls.runModule("g.gisenv", set="TGIS_USE_CURRENT_MAPSET=1")
    tgis.init()
    cls.use_temp_region()
    cls.runModule("g.region", n=80.0, s=0.0, e=120.0, w=0.0,
                  b=0.0, t=1.0, res=10.0)
def main():
    """Aggregate all maps of a STRDS into one raster with r.series and
    (on success, unless -t is given) register the result with the
    dataset's temporal extent.

    FIX: use a ``with`` block so the map-list file is closed even if a
    write fails, and renamed locals that shadowed the builtins ``file``,
    ``id`` and ``map``. The legacy return-code check of run_command is
    kept unchanged.
    """
    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file
        filename = grass.tempfile(True)
        with open(filename, 'w') as map_list:
            for row in rows:
                map_list.write("%s\n" % (row["id"]))

        flag = "z"
        if nulls:
            flag += "n"

        ret = grass.run_command("r.series", flags=flag, file=filename,
                                output=output, overwrite=grass.overwrite(),
                                method=method)

        if ret == 0 and not add_time:
            # Create the time range for the output map
            if output.find("@") >= 0:
                out_id = output
            else:
                mapset = grass.gisenv()["MAPSET"]
                out_id = output + "@" + mapset

            out_map = sp.get_new_map_instance(out_id)
            out_map.load()
            out_map.set_temporal_extent(sp.get_temporal_extent())

            # Register the map in the temporal database
            if out_map.is_in_db():
                out_map.update_all()
            else:
                out_map.insert()
def __init__(self, parent):
    """Timeline tool frame: set up the window, initialize TGIS and open
    a database connection that the GUI reuses for speed.
    """
    wx.Frame.__init__(self, parent, id=wx.ID_ANY,
                      title=_("GRASS GIS Timeline Tool"))

    # Raise exceptions on TGIS errors instead of exit(1) (GUI context)
    tgis.init(True)
    self.datasets = []   # datasets shown in the timeline — filled later, presumably by a setter; verify
    self.timeData = {}   # per-dataset temporal information cache
    self._layout()
    self.temporalType = None
    self.unit = None

    # We create a database interface here to speedup the GUI
    self.dbif = tgis.SQLDatabaseInterfaceConnection()
    self.dbif.connect()
def main():
    """Create a new, empty space time dataset from the module options
    (t.create).
    """
    # Collect the parsed options
    name = options["output"]
    stds_type = options["type"]
    temporal_type = options["temporaltype"]
    title = options["title"]
    description = options["description"]
    semantic_type = options["semantictype"]

    # The temporal database must exist before creating datasets
    tgis.init()

    tgis.open_new_stds(name, stds_type, temporal_type, title, description,
                       semantic_type, None, grass.overwrite())
def main():
    """Print univariate statistics for a space time raster dataset
    (t.rast.univar).
    """
    # Collect the parsed options and flags
    dataset = options["input"]
    where_clause = options["where"]
    extended_stats = flags["e"]
    omit_header = flags["s"]
    sep = grass.separator(options["separator"])

    # The temporal database must exist
    tgis.init()

    tgis.print_gridded_dataset_univar_statistics(
        "strds", dataset, where_clause, extended_stats, omit_header, sep)
def create_stds(self, stds_name, overwrite):
    """Create (or open) a relative-time STRDS named *stds_name* in the
    instance's mapset and keep an open database connection on ``self``.

    :param stds_name: unqualified dataset name
    :param overwrite: whether an existing dataset may be replaced
    :return: ``self`` (fluent style)
    """
    stds_id = tgis.AbstractMapDataset.build_id(stds_name, self.mapset)
    tgis.init()
    # Keep the connection open so later operations can reuse it
    self.dbif = tgis.SQLDatabaseInterfaceConnection()
    self.dbif.connect()
    self.stds_h = tgis.open_new_stds(name=stds_id,
                                     type="strds",
                                     temporaltype="relative",
                                     title='', descr='',
                                     semantic="mean",
                                     dbif=self.dbif,
                                     overwrite=overwrite)
    return self
def main():
    """Export a space time raster dataset as an archive file
    (t.rast.export).
    """
    # Collect the parsed options
    strds = options["input"]
    target = options["output"]
    compression = options["compression"]
    work_dir = options["workdir"]
    where_clause = options["where"]
    out_format = options["format"]

    # The temporal database must exist
    tgis.init()

    # Export the space time raster dataset
    tgis.export_stds(strds, target, compression, work_dir, where_clause,
                     out_format, "strds")
def setUpClass(cls):
    """Initiate the temporal GIS and set the region.

    Creates constant rasters and four absolute-time STRDS with different
    granularities plus two unregistered maps:

    - A: a1..a6, monthly intervals from 2001-01-01
    - B: b1..b2, 3-month intervals
    - C: c1, one 1-year interval
    - D: d1..d3, 5-day intervals starting Jan/Mar/May 2001
    - singletmap / nullmap: registered standalone (name=None)
    """
    tgis.init(True)  # Raise on error instead of exit(1)
    cls.use_temp_region()
    cls.runModule("g.region", n=80.0, s=0.0, e=120.0, w=0.0,
                  t=1.0, b=0.0, res=10.0)

    # Constant test rasters (value encodes identity for easy checking)
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a1 = 1")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a2 = 2")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a3 = 3")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a4 = 4")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a5 = 5")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a6 = 6")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="b1 = 7")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="b2 = 8")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="c1 = 9")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="d1 = 10")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="d2 = 11")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True,
                  expression="singletmap = 99")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True,
                  expression="d3 = 12")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True,
                  expression="nullmap = null()")

    # Empty absolute-time datasets to be filled below
    tgis.open_new_stds(name="A", type="strds", temporaltype="absolute",
                       title="A", descr="A", semantic="field",
                       overwrite=True)
    tgis.open_new_stds(name="B", type="strds", temporaltype="absolute",
                       title="B", descr="B", semantic="field",
                       overwrite=True)
    tgis.open_new_stds(name="C", type="strds", temporaltype="absolute",
                       title="C", descr="C", semantic="field",
                       overwrite=True)
    tgis.open_new_stds(name="D", type="strds", temporaltype="absolute",
                       title="D", descr="D", semantic="field",
                       overwrite=True)

    # Register the maps with their per-dataset granularities
    tgis.register_maps_in_space_time_dataset(type="raster", name="A",
                                             maps="a1,a2,a3,a4,a5,a6",
                                             start="2001-01-01",
                                             increment="1 month",
                                             interval=True)
    tgis.register_maps_in_space_time_dataset(type="raster", name="B",
                                             maps="b1,b2",
                                             start="2001-01-01",
                                             increment="3 months",
                                             interval=True)
    tgis.register_maps_in_space_time_dataset(type="raster", name="C",
                                             maps="c1",
                                             start="2001-01-01",
                                             increment="1 year",
                                             interval=True)
    tgis.register_maps_in_space_time_dataset(type="raster", name="D",
                                             maps="d1",
                                             start="2001-01-01",
                                             increment="5 days",
                                             interval=True)
    tgis.register_maps_in_space_time_dataset(type="raster", name="D",
                                             maps="d2",
                                             start="2001-03-01",
                                             increment="5 days",
                                             interval=True)
    tgis.register_maps_in_space_time_dataset(type="raster", name="D",
                                             maps="d3",
                                             start="2001-05-01",
                                             increment="5 days",
                                             interval=True)
    # Maps registered without a dataset (name=None)
    tgis.register_maps_in_space_time_dataset(type="raster", name=None,
                                             maps="singletmap",
                                             start="2001-03-01",
                                             end="2001-04-01")
    tgis.register_maps_in_space_time_dataset(type="raster", name=None,
                                             maps="nullmap",
                                             start="2001-01-01",
                                             end="2001-07-01")
def main():
    """Extract a subset of a space time 3D raster dataset
    (t.rast3d.extract).
    """
    # Collect the parsed options and flags
    dataset_in = options["input"]
    dataset_out = options["output"]
    where_clause = options["where"]
    expr = options["expression"]
    base_name = options["base"]
    num_procs = int(options["nprocs"])
    register_null = flags["n"]

    # The temporal database must exist
    tgis.init()

    tgis.extract_dataset(dataset_in, dataset_out, "raster3d", where_clause,
                         expr, base_name, num_procs, register_null)
def main():
    """Sample the input space time datasets with a sampler dataset
    (t.sample).
    """
    # Collect the parsed options and flags
    stds_inputs = options["inputs"]
    sampler = options["sample"]
    sampler_type = options["samtype"]
    inputs_type = options["intype"]
    sep = grass.separator(options["separator"])
    sampling_method = options["method"]
    header_flag = flags["c"]
    spatial_topology = flags["s"]

    # The temporal database must exist
    tgis.init()

    tgis.sample_stds_by_stds_topology(inputs_type, sampler_type, stds_inputs,
                                      sampler, header_flag, sep,
                                      sampling_method, spatial_topology, True)
def main():
    """Snap the temporal extents of all maps in a space time dataset
    (t.snap).
    """
    dataset_name = options["input"]
    dataset_type = options["type"]

    # The temporal database must exist
    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds = tgis.open_old_stds(dataset_name, dataset_type, dbif)
    stds.snap(dbif=dbif)
    stds.update_command_string(dbif=dbif)

    dbif.close()
def main():
    """Parse and run a temporal vector algebra expression (t.vect.algebra).

    FIX: catch ImportError only (the original bare ``except:`` swallowed
    every exception) and dropped the unused ``stdstype`` local.
    """
    expression = options['expression']
    basename = options['basename']
    spatial = flags["s"]

    # Check for PLY installation
    try:
        import ply.lex as lex  # noqa: F401 (availability check only)
        import ply.yacc as yacc  # noqa: F401
    except ImportError:
        grass.script.fatal(_("Please install PLY (Lex and Yacc Python implementation) to use the temporal algebra modules."))

    tgis.init(True)
    p = tgis.TemporalVectorAlgebraParser(run=True, debug=False,
                                         spatial=spatial)
    p.parse(expression, basename, grass.script.overwrite())
def main():
    """Parse and run a temporal algebra expression (t.select).

    FIX: ``overwrite=grass.overwrite`` passed the *function object* instead
    of its boolean result — the call parentheses were missing; also catch
    ImportError only instead of a bare ``except:``.
    """
    expression = options["expression"]
    spatial = flags["s"]
    stdstype = options["type"]

    # Check for PLY installation
    try:
        import ply.lex as lex  # noqa: F401 (availability check only)
        import ply.yacc as yacc  # noqa: F401
    except ImportError:
        grass.fatal(_("Please install PLY (Lex and Yacc Python implementation) to use the temporal algebra modules."))

    tgis.init(True)
    p = tgis.TemporalAlgebraParser(run=True, debug=False, spatial=spatial)
    p.parse(expression, stdstype, overwrite=grass.overwrite())
def main():
    """List the maps registered in a space time vector dataset
    (t.vect.list).
    """
    # Collect the parsed options and flags
    dataset = options["input"]
    columns = options["columns"]
    order = options["order"]
    where_clause = options["where"]
    sep = grass.separator(options["separator"])
    list_method = options["method"]
    header_flag = flags["u"]
    out_path = options["output"]

    # The temporal database must exist
    tgis.init()

    tgis.list_maps_of_stds("stvds", dataset, columns, order, where_clause,
                           sep, list_method, header_flag, outpath=out_path)
def __init__(self, parent, giface):
    """Temporal plot tool frame: set up the window, initialize TGIS and
    open a database connection that the GUI reuses for speed.
    """
    wx.Frame.__init__(self, parent, id=wx.ID_ANY,
                      title=_("GRASS GIS Temporal Plot Tool"))

    # Raise exceptions on TGIS errors instead of exit(1) (GUI context)
    tgis.init(True)
    self._giface = giface
    self.datasetsV = None  # vector datasets to plot — presumably set later; verify
    self.datasetsR = None  # raster datasets to plot — presumably set later; verify
    # self.vectorDraw=False
    # self.rasterDraw=False
    self.init()
    self._layout()

    # We create a database interface here to speedup the GUI
    self.dbif = tgis.SQLDatabaseInterfaceConnection()
    self.dbif.connect()
    self.Bind(wx.EVT_CLOSE, self.onClose)
def main():
    """Extract a subset of a space time vector dataset (t.vect.extract)."""
    # Collect the parsed options and flags
    dataset_in = options["input"]
    dataset_out = options["output"]
    where_clause = options["where"]
    expr = options["expression"]
    layer = options["layer"]
    feature_type = options["type"]
    base_name = options["basename"]
    num_procs = int(options["nprocs"])
    register_null = flags["n"]

    # The temporal database must exist
    tgis.init()

    tgis.extract_dataset(dataset_in, dataset_out, "vector", where_clause,
                         expr, base_name, num_procs, register_null,
                         layer, feature_type)
def main():
    """Import a space time vector dataset archive (t.vect.import)."""
    # Collect the parsed options and flags
    archive = options["input"]
    dataset_out = options["output"]
    extract_dir = options["directory"]
    title = options["title"]
    description = options["description"]
    location = options["location"]
    base_name = options["basename"]
    extend_region = flags["e"]
    override_proj = flags["o"]
    create_location = flags["c"]

    tgis.init()

    tgis.import_stds(archive, dataset_out, extract_dir, title, description,
                     location, None, extend_region, override_proj,
                     create_location, "stvds", base_name)
def setUpClass(cls):
    """Initiate the temporal GIS and set the region; create an
    absolute-time STR3DS "A" from four constant 3D rasters.

    FIX: dropped the unused ``ret`` binding of the g.region call and
    replaced the four copy-pasted r3.mapcalc calls with a loop.
    """
    tgis.init(True)  # Raise on error instead of exit(1)
    cls.use_temp_region()
    grass.script.run_command("g.region", n=80.0, s=0.0, e=120.0, w=0.0,
                             t=100.0, b=0.0, res=10.0)

    # a1..a4 hold the constants 1..4
    for idx in range(1, 5):
        cls.runModule("r3.mapcalc", overwrite=True, quiet=True,
                      expression="a%d = %d" % (idx, idx))

    tgis.open_new_stds(name="A", type="str3ds", temporaltype="absolute",
                       title="A", descr="A", semantic="field",
                       overwrite=True)

    tgis.register_maps_in_space_time_dataset(type="raster_3d", name="A",
                                             maps="a1,a2,a3,a4",
                                             start="2001-01-01",
                                             increment="1 day",
                                             interval=True)
def main():
    """Print univariate statistics of attribute columns of a space time
    vector dataset (t.vect.univar).
    """
    # Collect the parsed options and flags
    dataset = options["input"]
    temporal_where = options["twhere"]
    layer = options["layer"]
    feature_type = options["type"]
    column = options["column"]
    where_clause = options["where"]
    extended_stats = flags["e"]
    header_flag = flags["s"]
    sep = grass.separator(options["separator"])

    # The temporal database must exist
    tgis.init()

    tgis.print_vector_dataset_univar_statistics(dataset, temporal_where,
                                                layer, feature_type, column,
                                                where_clause, extended_stats,
                                                header_flag, sep)
def main():
    """Convert a space time raster dataset into a 3D raster (voxel cube),
    mapping time onto the z axis relative to 1900-01-01 (t.rast.to.rast3).
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]

    # Make sure the temporal database exists
    tgis.init()

    mapset = grass.gisenv()["MAPSET"]

    sp = tgis.open_old_stds(input, "strds")

    grass.use_temp_region()

    maps = sp.get_registered_maps_as_objects_by_granularity()
    num_maps = len(maps)

    # get datatype of the first map
    if maps:
        maps[0][0].select()
        datatype = maps[0][0].metadata.get_datatype()
    else:
        datatype = None

    # Get the granularity and set bottom, top and top-bottom resolution
    granularity = sp.get_granularity()

    # This is the reference time to scale the z coordinate
    reftime = datetime(1900, 1, 1)

    # We set top and bottom according to the start time in relation
    # to the date 1900-01-01 00:00:00
    # In case of days, hours, minutes and seconds, a double number
    # is used to represent days and fracs of a day

    # Space time voxel cubes with monthly or yearly granularity can not be
    # mixed with other temporal units

    # Compatible temporal units are: days, hours, minutes and seconds
    # Incompatible are years and months
    start, end = sp.get_temporal_extent_as_tuple()

    if sp.is_time_absolute():
        # absolute granularity has the form "<number> <unit>"
        unit = granularity.split(" ")[1]
        granularity = float(granularity.split(" ")[0])

        print("Gran from stds %0.15f" % (granularity))

        if unit == "years" or unit == "year":
            bottom = float(start.year - 1900)
            top = float(granularity * num_maps)
        elif unit == "months" or unit == "month":
            bottom = float((start.year - 1900) * 12 + start.month)
            top = float(granularity * num_maps)
        else:
            # sub-monthly units: express everything as fractional days
            bottom = float(tgis.time_delta_to_relative_time(start - reftime))
            days = 0.0
            hours = 0.0
            minutes = 0.0
            seconds = 0.0
            if unit == "days" or unit == "day":
                days = float(granularity)
            if unit == "hours" or unit == "hour":
                hours = float(granularity)
            if unit == "minutes" or unit == "minute":
                minutes = float(granularity)
            if unit == "seconds" or unit == "second":
                seconds = float(granularity)

            granularity = float(days + hours / 24.0 + minutes / 1440.0 +
                                seconds / 86400.0)
    else:
        unit = sp.get_relative_time_unit()
        bottom = start

    top = float(bottom + granularity * float(num_maps))

    try:
        grass.run_command("g.region", t=top, b=bottom, tbres=granularity)
    except CalledModuleError:
        grass.fatal(_("Unable to set 3D region"))

    # Create a NULL map to fill the gaps
    null_map = "temporary_null_map_%i" % os.getpid()
    if datatype == "DCELL":
        grass.mapcalc("%s = double(null())" % (null_map))
    elif datatype == "FCELL":
        grass.mapcalc("%s = float(null())" % (null_map))
    else:
        grass.mapcalc("%s = null()" % (null_map))

    if maps:
        count = 0
        map_names = ""
        for map in maps:
            # Use the first map
            id = map[0].get_id()
            # None ids will be replaced by NULL maps
            if id is None:
                id = null_map

            if count == 0:
                map_names = id
            else:
                map_names += ",%s" % id

            count += 1

        try:
            grass.run_command(
                "r.to.rast3",
                input=map_names,
                output=output,
                overwrite=grass.overwrite(),
            )
        except CalledModuleError:
            grass.fatal(_("Unable to create 3D raster map <%s>" % output))

    grass.run_command("g.remove", flags="f", type="raster", name=null_map)

    title = _("Space time voxel cube")
    descr = _("This space time voxel cube was created with t.rast.to.rast3")

    # Set the unit
    try:
        grass.run_command(
            "r3.support",
            map=output,
            vunit=unit,
            title=title,
            description=descr,
            overwrite=grass.overwrite(),
        )
    except CalledModuleError:
        grass.warning(_("%s failed to set units.") % "r3.support")

    # Register the space time voxel cube in the temporal GIS
    if output.find("@") >= 0:
        id = output
    else:
        id = output + "@" + mapset

    start, end = sp.get_temporal_extent_as_tuple()

    r3ds = tgis.Raster3DDataset(id)

    # Replace an already registered dataset with the same id
    if r3ds.is_in_db():
        r3ds.select()
        r3ds.delete()

    r3ds = tgis.Raster3DDataset(id)
    r3ds.load()

    if sp.is_time_absolute():
        r3ds.set_absolute_time(start, end)
    else:
        r3ds.set_relative_time(start, end, sp.get_relative_time_unit())

    r3ds.insert()
def main():
    """Compute an NDVI time series from a STRDS that carries semantic
    band labels, via t.rast.mapcalc; optionally re-register the result
    maps into the input STRDS under *target_band* and drop the temporary
    output dataset.
    """
    # Get the options
    _input = options["input"]
    output = options["output"]
    base = options["basename"]
    method = options["method"]
    red_band = options["red"]
    nir_band = options["nir"]
    target_band = options["target"]
    nprocs = int(options["nprocs"])
    register_null = flags["n"]
    spatial = flags["s"]

    # Forward the -n / -s flags to t.rast.mapcalc
    new_flags = ""
    if register_null:
        new_flags = "n"
    if spatial:
        new_flags = new_flags + "s"

    # get list of bands available in the input strds
    t_info = grass.parse_command('t.info', input=_input, flags='g')
    input_bands = t_info["semantic_labels"].split(',')

    # get the sensor abbreviation split by _
    sensor_abbr = None
    for band in input_bands:
        if "_" in band:
            sensor_abbr = band.split('_')[0]
            # TODO: check if sensor abbreviation changes
            break

    # find bands: fall back to per-sensor default band numbers when the
    # generic names "red"/"nir" were given but are not actual labels
    if red_band not in input_bands:
        if red_band != "red":
            grass.fatal("Band %s not found in %s" % (red_band, _input))
        red_band = None
        if sensor_abbr is not None:
            if sensor_abbr == "L5":
                red_band = "L5_3"
            elif sensor_abbr == "L7":
                red_band = "L7_3"
            elif sensor_abbr == "L8":
                red_band = "L8_4"
            elif sensor_abbr == "S2":
                red_band = "S2_4"
    if red_band is None:
        grass.fatal("No red channel band found in %s" % (_input))

    if nir_band not in input_bands:
        if nir_band != "nir":
            grass.fatal("Band %s not found in %s" % (nir_band, _input))
        nir_band = None
        if sensor_abbr is not None:
            if sensor_abbr == "L5":
                nir_band = "L5_4"
            elif sensor_abbr == "L7":
                nir_band = "L7_4"
            elif sensor_abbr == "L8":
                nir_band = "L8_5"
            elif sensor_abbr == "S2":
                nir_band = "S2_8"
    if nir_band is None:
        grass.fatal("No nir channel band found in %s" % (_input))

    # Build the band-qualified inputs and the NDVI = (nir-red)/(nir+red)
    # expression, keeping the mapset qualifier if the input had one
    new_inputs = []
    if '@' in _input:
        strds, mapset = _input.split('@')
        new_inputs.append("%s.%s@%s" % (strds, red_band, mapset))
        new_inputs.append("%s.%s@%s" % (strds, nir_band, mapset))
        expression = (
            "float(%(instrds)s.%(nir)s@%(mapset)s - %(instrds)s.%(red)s@%(mapset)s) / "
            "(%(instrds)s.%(nir)s@%(mapset)s + %(instrds)s.%(red)s@%(mapset)s)" % {
                "instrds": strds,
                "nir": nir_band,
                "red": red_band,
                "mapset": mapset
            })
    else:
        new_inputs.append("%s.%s" % (_input, red_band))
        new_inputs.append("%s.%s" % (_input, nir_band))
        expression = ("float(%(instrds)s.%(nir)s - %(instrds)s.%(red)s) / "
                      "(%(instrds)s.%(nir)s + %(instrds)s.%(red)s)" % {
                          "instrds": _input,
                          "nir": nir_band,
                          "red": red_band
                      })

    # print(expression)
    grass.run_command('t.rast.mapcalc', inputs=(',').join(new_inputs),
                      expression=expression, method=method, output=output,
                      basename=base, nprocs=nprocs, flags=new_flags)

    # if target band is given, the new raster maps must be registered in the input strds
    if target_band:
        import grass.temporal as tgis

        # Make sure the temporal database exists
        tgis.init()
        # We need a database interface
        dbif = tgis.SQLDatabaseInterfaceConnection()
        dbif.connect()

        in_sp = tgis.open_old_stds(_input, "strds", dbif)
        out_sp = tgis.open_old_stds(output, "strds", dbif)

        maps = out_sp.get_registered_maps_as_objects(dbif=dbif)
        if not maps:
            dbif.close()
            grass.warning("Space time raster dataset <%s> is empty" % in_sp.get_id())
            return

        # Re-label each result map and register it back into the input STRDS
        for map in maps:
            map.set_semantic_label(target_band)
            # Insert map in temporal database
            map.update(dbif)
            in_sp.register_map(map, dbif)

        # remove temporary strds
        dbif.close()
        grass.run_command('t.remove', inputs=output, type="strds", flags='f')
def main():
    """Rename a space time dataset within the current mapset (t.rename).

    FIX: removed ``== True`` / ``== False`` comparisons, renamed the local
    that shadowed the builtin ``type``, and close the DB connection on the
    success path as well.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    stds_type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    # Get the current mapset to create the id of the space time dataset
    mapset = grass.gisenv()["MAPSET"]

    # Qualify both names with a mapset if not already qualified
    if input.find("@") >= 0:
        old_id = input
    else:
        old_id = input + "@" + mapset
    if output.find("@") >= 0:
        new_id = output
    else:
        new_id = output + "@" + mapset

    # Do not overwrite yourself
    if new_id == old_id:
        return

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()
    stds = tgis.dataset_factory(stds_type, old_id)

    # Renaming is only allowed within the current mapset
    if new_id.split("@")[1] != mapset:
        grass.fatal(
            _("Space time %s dataset <%s> can not be renamed. "
              "Mapset of the new identifier differs from the current "
              "mapset.") % (stds.get_new_map_instance(None).get_type(),
                            old_id))

    if not stds.is_in_db(dbif=dbif):
        dbif.close()
        grass.fatal(
            _("Space time %s dataset <%s> not found")
            % (stds.get_new_map_instance(None).get_type(), old_id))

    # Check if the new id is in the database
    new_stds = tgis.dataset_factory(stds_type, new_id)

    if new_stds.is_in_db(dbif=dbif) and not grass.overwrite():
        dbif.close()
        grass.fatal(
            _("Unable to rename Space time %s dataset <%s>. Name <%s> "
              "is in use, please use the overwrite flag.")
            % (stds.get_new_map_instance(None).get_type(), old_id, new_id))

    # Remove an already existing space time dataset
    if new_stds.is_in_db(dbif=dbif):
        new_stds.delete(dbif=dbif)

    stds.select(dbif=dbif)
    stds.rename(ident=new_id, dbif=dbif)
    stds.update_command_string(dbif=dbif)
    dbif.close()
def main():
    """Fetch observations from a Sensor Observation Service and create
    maps at the requested temporal granularity.

    FIXES: ``sys.version >= (3, 0)`` compared a *string* with a tuple
    (always False on Python 2, TypeError on Python 3) — replaced with
    ``sys.version_info``; ``flags.iteritems()`` is Python-2-only —
    replaced with ``items()``.
    """
    parsed_obs = dict()

    service = SensorObservationService(options['url'],
                                       version=options['version'],
                                       username=options['username'],
                                       password=options['password'])

    # Any description flag set -> only print the requested description
    if any(value is True and key in ['o', 'v', 'p', 't']
           for key, value in flags.items()):
        get_description(service, options, flags)

    if options['offering'] == '' or options['output'] == '':
        if sys.version_info >= (3, 0):
            sys.tracebacklimit = None
        else:
            sys.tracebacklimit = 0
        raise AttributeError("You have to define any flags or use 'output' and"
                             " 'offering' parameters to get the data")

    if options['granularity'] != '':
        import grass.temporal as tgis
        tgis.init()
        secondsGranularity = int(tgis.gran_to_gran(options['granularity'],
                                                   '1 second', True))
    else:
        secondsGranularity = 1

    for off in options['offering'].split(','):
        # TODO: Find better way than iteration (at best OWSLib upgrade)
        procedure, observed_properties, event_time = handle_not_given_options(
            service, off, options['procedure'],
            options['observed_properties'], options['event_time'])
        event_time = 'T'.join(event_time.split(' '))

        obs = service.get_observation(
            offerings=[off],
            responseFormat=options['response_format'],
            observedProperties=observed_properties,
            procedure=procedure,
            eventTime=event_time,
            username=options['username'],
            password=options['password'])

        try:
            # Parse depending on service version / response format
            if options['version'] in ['1.0.0', '1.0'] and str(
                    options['response_format']
            ) == 'text/xml;subtype="om/1.0.0"':
                for prop in observed_properties:
                    parsed_obs.update({prop: xml2geojson(obs, prop)})
            elif str(options['response_format']) == 'application/json':
                for prop in observed_properties:
                    parsed_obs.update({prop: json2geojson(obs, prop)})
        except AttributeError:
            if sys.version_info >= (3, 0):
                sys.tracebacklimit = None
            else:
                sys.tracebacklimit = 0
            raise AttributeError('There is no data for at least one of your '
                                 'procedures, could you change the time '
                                 'parameter, observed properties, '
                                 'procedures or offerings')

        create_maps(parsed_obs, off, secondsGranularity)

    return 0
def main():
    """Cyclically accumulate maps of a space time raster dataset.

    Iterates over the input STRDS in cycles of the given length, builds a
    granularity-conform list of virtual time slots per cycle, matches the
    registered maps (and optional lower/upper threshold datasets) to those
    slots via temporal topology, runs r.series.accumulate per slot feeding
    each result as basemap into the next, and finally registers all output
    maps in the output STRDS.
    """
    # lazy imports
    import grass.temporal as tgis
    from grass.pygrass.modules import Module

    # Get the options
    input = options["input"]
    output = options["output"]
    start = options["start"]
    stop = options["stop"]
    base = options["basename"]
    cycle = options["cycle"]
    lower = options["lower"]
    upper = options["upper"]
    offset = options["offset"]
    limits = options["limits"]
    shift = options["shift"]
    scale = options["scale"]
    method = options["method"]
    granularity = options["granularity"]
    register_null = flags["n"]
    reverse = flags["r"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = tgis.get_current_mapset()

    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    input_strds = tgis.SpaceTimeRasterDataset(id)
    if not input_strds.is_in_db():
        dbif.close()
        grass.fatal(_("Space time raster dataset <%s> not found") % (id))
    input_strds.select(dbif)

    if output.find("@") >= 0:
        out_id = output
    else:
        out_id = output + "@" + mapset

    # The output space time raster dataset
    output_strds = tgis.SpaceTimeRasterDataset(out_id)
    if output_strds.is_in_db(dbif):
        if not grass.overwrite():
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is already in the "
                  "database, use overwrite flag to overwrite") % out_id)

    if tgis.check_granularity_string(granularity,
                                     input_strds.get_temporal_type()) is False:
        dbif.close()
        grass.fatal(_("Invalid granularity"))

    if tgis.check_granularity_string(cycle,
                                     input_strds.get_temporal_type()) is False:
        dbif.close()
        grass.fatal(_("Invalid cycle"))

    if offset:
        if tgis.check_granularity_string(
                offset, input_strds.get_temporal_type()) is False:
            dbif.close()
            grass.fatal(_("Invalid offset"))

    # The lower threshold space time raster dataset
    if lower:
        # BUGFIX: the original tested `if not range:` — `range` is the
        # builtin and always truthy, so the check never fired; the value
        # that must be set here is the limits option
        if not limits:
            dbif.close()
            grass.fatal(
                _("You need to set the range to compute the occurrence"
                  " space time raster dataset"))

        if lower.find("@") >= 0:
            lower_id = lower
        else:
            lower_id = lower + "@" + mapset

        lower_strds = tgis.SpaceTimeRasterDataset(lower_id)
        if not lower_strds.is_in_db():
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found") %
                (lower_strds.get_id()))

        if lower_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and lower strds must be equal"))

        lower_strds.select(dbif)

    # The upper threshold space time raster dataset
    if upper:
        if not lower:
            dbif.close()
            grass.fatal(
                _("The upper option works only in conjunction with the lower option"))

        if upper.find("@") >= 0:
            # BUGFIX: was the no-op `upper = upper`, leaving upper_id
            # unbound and raising NameError for fully qualified names
            upper_id = upper
        else:
            upper_id = upper + "@" + mapset

        upper_strds = tgis.SpaceTimeRasterDataset(upper_id)
        if not upper_strds.is_in_db():
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found") %
                (upper_strds.get_id()))

        if upper_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and upper strds must be equal"))

        upper_strds.select(dbif)

    input_strds_start, input_strds_end = \
        input_strds.get_temporal_extent_as_tuple()

    if input_strds.is_time_absolute():
        start = tgis.string_to_datetime(start)
        if stop:
            stop = tgis.string_to_datetime(stop)
        else:
            stop = input_strds_end
        start = tgis.adjust_datetime_to_granularity(start, granularity)
    else:
        start = int(start)
        if stop:
            stop = int(stop)
        else:
            stop = input_strds_end

    if input_strds.is_time_absolute():
        end = tgis.increment_datetime_by_string(start, cycle)
    else:
        end = start + cycle

    def next_cycle(end):
        """Return (start, end) of the following cycle, honouring offset."""
        if input_strds.is_time_absolute():
            start = end
            if offset:
                start = tgis.increment_datetime_by_string(end, offset)
            end = tgis.increment_datetime_by_string(start, cycle)
        else:
            start = end
            if offset:
                start = end + offset
            end = start + cycle
        return start, end

    limit_relations = [
        "EQUALS", "DURING", "OVERLAPS", "OVERLAPPING", "CONTAINS"
    ]

    count = 1
    output_maps = []

    while input_strds_end > start and stop > start:
        # Make sure that the cyclic computation will stop at the correct time
        if stop and end > stop:
            end = stop

        where = "start_time >= \'%s\' AND start_time < \'%s\'" % (str(start),
                                                                  str(end))
        input_maps = input_strds.get_registered_maps_as_objects(where=where,
                                                                dbif=dbif)

        grass.message(_("Processing cycle %s - %s" % (str(start), str(end))))

        if len(input_maps) == 0:
            # BUGFIX: advance the cycle before continuing — a bare
            # `continue` left start/end unchanged and looped forever
            start, end = next_cycle(end)
            continue

        # Lets create a dummy list of maps with granularity conform intervals
        gran_list = []
        gran_list_low = []
        gran_list_up = []
        gran_start = start
        while gran_start < end:
            map = input_strds.get_new_map_instance("%i@%i" % (count, count))
            if input_strds.is_time_absolute():
                gran_end = tgis.increment_datetime_by_string(gran_start,
                                                             granularity)
                map.set_absolute_time(gran_start, gran_end)
                gran_start = tgis.increment_datetime_by_string(gran_start,
                                                               granularity)
            else:
                gran_end = gran_start + granularity
                map.set_relative_time(gran_start, gran_end,
                                      input_strds.get_relative_time_unit())
                gran_start = gran_start + granularity

            gran_list.append(copy(map))
            # Lists to compute the topology with upper and lower datasets
            gran_list_low.append(copy(map))
            gran_list_up.append(copy(map))

        # Create the topology between the granularity conform list and all maps
        # of the current cycle
        gran_topo = tgis.SpatioTemporalTopologyBuilder()
        gran_topo.build(gran_list, input_maps)

        if lower:
            lower_maps = lower_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_lower_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_lower_topo.build(gran_list_low, lower_maps)

        if upper:
            upper_maps = upper_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_upper_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_upper_topo.build(gran_list_up, upper_maps)

        old_map_name = None

        # Aggregate
        num_maps = len(gran_list)
        for i in range(num_maps):
            if reverse:
                map = gran_list[num_maps - i - 1]
            else:
                map = gran_list[i]

            # Select input maps based on temporal topology relations
            input_maps = []
            if map.get_equal():
                input_maps += map.get_equal()
            elif map.get_contains():
                input_maps += map.get_contains()
            elif map.get_overlaps():
                input_maps += map.get_overlaps()
            elif map.get_overlapped():
                input_maps += map.get_overlapped()
            elif map.get_during():
                input_maps += map.get_during()

            # Check input maps
            if len(input_maps) == 0:
                continue

            # New output map
            if input_strds.get_temporal_type() == 'absolute' and \
                    time_suffix == 'gran':
                suffix = tgis.create_suffix_from_datetime(
                    map.temporal_extent.get_start_time(),
                    input_strds.get_granularity())
                output_map_name = "{ba}_{su}".format(ba=base, su=suffix)
            elif input_strds.get_temporal_type() == 'absolute' and \
                    time_suffix == 'time':
                suffix = tgis.create_time_suffix(map)
                output_map_name = "{ba}_{su}".format(ba=base, su=suffix)
            else:
                output_map_name = tgis.create_numeric_suffix(
                    base, count, time_suffix)

            output_map_id = map.build_id(output_map_name, mapset)
            output_map = input_strds.get_new_map_instance(output_map_id)

            # Check if new map is in the temporal database
            if output_map.is_in_db(dbif):
                if grass.overwrite():
                    # Remove the existing temporal database entry
                    output_map.delete(dbif)
                    output_map = input_strds.get_new_map_instance(
                        output_map_id)
                else:
                    grass.fatal(
                        _("Map <%s> is already registered in the temporal"
                          " database, use overwrite flag to overwrite.") %
                        (output_map.get_map_id()))

            map_start, map_end = map.get_temporal_extent_as_tuple()

            if map.is_time_absolute():
                output_map.set_absolute_time(map_start, map_end)
            else:
                output_map.set_relative_time(map_start, map_end,
                                             map.get_relative_time_unit())

            limits_vals = limits.split(",")
            limits_lower = float(limits_vals[0])
            limits_upper = float(limits_vals[1])

            # Find the threshold maps temporally related to this slot
            lower_map_name = None
            if lower:
                relations = gran_list_low[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        lower_map_name = str(relations[relation][0].get_id())
                        break

            upper_map_name = None
            if upper:
                relations = gran_list_up[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        upper_map_name = str(relations[relation][0].get_id())
                        break

            input_map_names = []
            for input_map in input_maps:
                input_map_names.append(input_map.get_id())

            # Set up the module
            accmod = Module("r.series.accumulate",
                            input=input_map_names,
                            output=output_map_name,
                            run_=False)

            # The previous cycle-slot result is the accumulation base
            if old_map_name:
                accmod.inputs["basemap"].value = old_map_name
            if lower_map_name:
                accmod.inputs["lower"].value = lower_map_name
            if upper_map_name:
                accmod.inputs["upper"].value = upper_map_name
            accmod.inputs["limits"].value = (limits_lower, limits_upper)

            if shift:
                accmod.inputs["shift"].value = float(shift)
            if scale:
                accmod.inputs["scale"].value = float(scale)
            if method:
                accmod.inputs["method"].value = method

            print(accmod)
            accmod.run()

            if accmod.popen.returncode != 0:
                dbif.close()
                grass.fatal(_("Error running r.series.accumulate"))

            output_maps.append(output_map)
            old_map_name = output_map_name
            count += 1

        # Increment the cycle
        start, end = next_cycle(end)

    # Insert the maps into the output space time dataset
    if output_strds.is_in_db(dbif):
        if grass.overwrite():
            output_strds.delete(dbif)
            output_strds = input_strds.get_new_instance(out_id)

    temporal_type, semantic_type, title, description = \
        input_strds.get_initial_values()
    output_strds.set_initial_values(temporal_type, semantic_type, title,
                                    description)
    output_strds.insert(dbif)

    empty_maps = []

    # Register the maps in the database
    count = 0
    for output_map in output_maps:
        count += 1
        if count % 10 == 0:
            grass.percent(count, len(output_maps), 1)
        # Read the raster map data
        output_map.load()
        # In case of a empty map continue, do not register empty maps
        if not register_null:
            if output_map.metadata.get_min() is None and \
               output_map.metadata.get_max() is None:
                empty_maps.append(output_map)
                continue

        # Insert map in temporal database
        output_map.insert(dbif)
        output_strds.register_map(output_map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    output_strds.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    dbif.close()

    # Remove empty maps
    if len(empty_maps) > 0:
        for map in empty_maps:
            grass.run_command("g.remove", flags='f', type="raster",
                              name=map.get_name(), quiet=True)
def main(options, flags):
    """Sample a space time raster dataset at point locations with r.what.

    Coordinates come from the points option, the coordinates option or
    stdin (-i flag). The registered maps are sampled in parallel batches
    (r.what accepts at most 400 maps per call) and the per-batch output
    files are merged into the requested layout (row/col/timerow).
    """
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    points = options["points"]
    coordinates = options["coordinates"]
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    order = options["order"]
    layout = options["layout"]
    # NOTE(review): null_value is read but never forwarded to r.what in
    # this block — confirm whether it should be passed to the module
    null_value = options["null_value"]
    separator = options["separator"]

    nprocs = int(options["nprocs"])
    write_header = flags["n"]
    use_stdin = flags["i"]

    #output_cat_label = flags["f"]
    #output_color = flags["r"]
    #output_cat = flags["i"]

    overwrite = gscript.overwrite()

    if coordinates and points:
        gscript.fatal(
            _("Options coordinates and points are mutually exclusive"))

    if not coordinates and not points and not use_stdin:
        gscript.fatal(
            _("Please specify the coordinates, the points option or use the 'i' flag to pipe coordinate positions to t.rast.what from stdin, to provide the sampling coordinates"
              ))

    # BUGFIX: site_input was only assigned inside the use_stdin branch and
    # raised NameError in the output stage otherwise — default it here
    site_input = False
    if use_stdin:
        coordinates_stdin = str(sys.__stdin__.read())
        # Check if coordinates are given with site names or IDs:
        # three or more whitespace-separated fields means named sites
        stdin_length = len(coordinates_stdin.split('\n')[0].split())
        site_input = stdin_length >= 3

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, order=order,
                                             dbif=dbif)
    dbif.close()
    if not maps:
        gscript.fatal(
            _("Space time raster dataset <%s> is empty") % sp.get_id())

    # Setup separator
    if separator == "pipe":
        separator = "|"
    if separator == "comma":
        separator = ","
    if separator == "space":
        separator = " "
    if separator == "tab":
        separator = "\t"
    if separator == "newline":
        separator = "\n"

    # Setup flags are disabled due to test issues
    # (renamed from `flags` to avoid shadowing the function parameter)
    r_what_flags = ""
    #if output_cat_label is True:
    #    r_what_flags += "f"
    #if output_color is True:
    #    r_what_flags += "r"
    #if output_cat is True:
    #    r_what_flags += "i"

    # Configure the r.what module
    if points:
        r_what = pymod.Module("r.what", map="dummy", output="dummy",
                              run_=False, separator=separator, points=points,
                              overwrite=overwrite, flags=r_what_flags,
                              quiet=True)
    elif coordinates:
        # Create a list of values
        coord_list = coordinates.split(",")
        r_what = pymod.Module("r.what", map="dummy", output="dummy",
                              run_=False, separator=separator,
                              coordinates=coord_list, overwrite=overwrite,
                              flags=r_what_flags, quiet=True)
    elif use_stdin:
        r_what = pymod.Module("r.what", map="dummy", output="dummy",
                              run_=False, separator=separator,
                              stdin_=coordinates_stdin, overwrite=overwrite,
                              flags=r_what_flags, quiet=True)
    else:
        # BUGFIX: was gscript.error(), which does not exit — execution
        # would continue and hit a NameError on the undefined r_what
        gscript.fatal(_("Please specify points or coordinates"))

    if len(maps) < nprocs:
        nprocs = len(maps)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))
    num_maps = len(maps)

    # 400 Maps is the absolute maximum in r.what
    # We need to determie the number of maps that can be processed
    # in parallel

    # First estimate the number of maps per process. We use 400 maps
    # simultaniously as maximum for a single process
    num_loops = int(num_maps / (400 * nprocs))
    remaining_maps = num_maps % (400 * nprocs)

    if num_loops == 0:
        num_loops = 1
        remaining_maps = 0

    # Compute the number of maps for each process
    maps_per_loop = int((num_maps - remaining_maps) / num_loops)
    maps_per_process = int(maps_per_loop / nprocs)
    remaining_maps_per_loop = maps_per_loop % nprocs

    # We put the output files in an ordered list
    output_files = []
    output_time_list = []

    count = 0
    for loop in range(num_loops):
        file_name = gscript.tempfile() + "_%i" % (loop)
        count = process_loop(nprocs, maps, file_name, count, maps_per_process,
                             remaining_maps_per_loop, output_files,
                             output_time_list, r_what, process_queue)

    process_queue.wait()

    gscript.verbose("Number of raster map layers remaining for sampling %i" %
                    (remaining_maps))
    if remaining_maps > 0:
        # Use a single process if less then 100 maps
        if remaining_maps <= 100:
            map_names = []
            for i in range(remaining_maps):
                map = maps[count]
                map_names.append(map.get_id())
                count += 1
            mod = copy.deepcopy(r_what)
            mod(map=map_names, output=file_name)
            process_queue.put(mod)
        else:
            maps_per_process = int(remaining_maps / nprocs)
            remaining_maps_per_loop = remaining_maps % nprocs

            file_name = "out_remain"
            process_loop(nprocs, maps, file_name, count, maps_per_process,
                         remaining_maps_per_loop, output_files,
                         output_time_list, r_what, process_queue)

        # Wait for unfinished processes
        process_queue.wait()

    # Out the output files in the correct order together
    if layout == "row":
        one_point_per_row_output(separator, output_files, output_time_list,
                                 output, write_header, site_input)
    elif layout == "col":
        one_point_per_col_output(separator, output_files, output_time_list,
                                 output, write_header, site_input)
    else:
        one_point_per_timerow_output(separator, output_files,
                                     output_time_list, output, write_header,
                                     site_input)
def main():
    """Aggregate a space time raster dataset with r.series.

    Writes the ids of the registered maps to a temporary file, runs
    r.series with the requested methods/quantiles, and (unless -t is set)
    stamps the output maps with the temporal extent spanned by the
    selected input maps and registers them in the temporal database.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    quantile = options["quantile"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Check if number of methods and output maps matches
    # (BUGFIX: removed three leftover debug print() calls here)
    if (len(list(filter(None, quantile.split(',')))) +
            len(method.split(','))) != len(output.split(',')):
        grass.fatal(
            _('Number requested methods and output maps do not match.'))

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file; use a context manager so the
        # file handle is closed even on error (and avoid shadowing `file`)
        filename = grass.tempfile(True)
        with open(filename, 'w') as map_file:
            for row in rows:
                map_file.write("%s\n" % (row["id"]))

        flag = ""
        if len(rows) > 1000:
            grass.warning(
                _("Processing over 1000 maps: activating -z flag of r.series which slows down processing"
                  ))
            flag += "z"
        if nulls:
            flag += "n"

        try:
            grass.run_command("r.series", flags=flag, file=filename,
                              output=output, overwrite=grass.overwrite(),
                              method=method, quantile=quantile)
        except CalledModuleError:
            grass.fatal(_("%s failed. Check above error messages.") %
                        'r.series')

        if not add_time:
            # We need to set the temporal extent from the subset of selected maps
            maps = sp.get_registered_maps_as_objects(where=where, order=order,
                                                     dbif=None)
            first_map = maps[0]
            last_map = maps[-1]
            start_a, end_a = first_map.get_temporal_extent_as_tuple()
            start_b, end_b = last_map.get_temporal_extent_as_tuple()

            # Point-in-time maps have no end time
            if end_b is None:
                end_b = start_b

            if first_map.is_time_absolute():
                extent = tgis.AbsoluteTemporalExtent(start_time=start_a,
                                                     end_time=end_b)
            else:
                extent = tgis.RelativeTemporalExtent(
                    start_time=start_a, end_time=end_b,
                    unit=first_map.get_relative_time_unit())

            for out_map in output.split(','):
                # Create the time range for the output map
                if out_map.find("@") >= 0:
                    id = out_map
                else:
                    mapset = grass.gisenv()["MAPSET"]
                    id = out_map + "@" + mapset

                map = sp.get_new_map_instance(id)
                map.load()
                map.set_temporal_extent(extent=extent)

                # Register the map in the temporal database
                if map.is_in_db():
                    map.update_all()
                else:
                    map.insert()
def main():
    """Modify the metadata of a space time dataset.

    Updates title, description, semantic type and (for raster datasets)
    aggregation type. With -m the registered maps are re-read from the
    spatial database: existing maps get refreshed metadata, vanished maps
    are unregistered from all affected datasets.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    name = options["input"]
    type = options["type"]
    title = options["title"]
    aggr_type = options["aggr_type"]
    description = options["description"]
    semantic = options["semantictype"]
    # BUGFIX: the -u flag was read and then immediately clobbered by
    # `update = False`, so it never had any effect — keep it separately
    force_update = flags["u"]
    map_update = flags["m"]

    # Make sure the temporal database exists
    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds = tgis.open_old_stds(name, type, dbif)

    # Tracks whether any metadata entry was actually modified
    update = False
    if aggr_type and type == "stvds":
        dbif.close()  # BUGFIX: the early return leaked the open connection
        return ()

    if aggr_type and type != "stvds":
        stds.metadata.set_aggregation_type(aggregation_type=aggr_type)
        update = True
    if title:
        stds.metadata.set_title(title=title)
        update = True
        # Update only non-null entries
    if description:
        stds.metadata.set_description(description=description)
        update = True
    if semantic:
        stds.base.set_semantic_type(semantic_type=semantic)
        update = True

    if update:
        stds.update(dbif=dbif)

    if map_update:
        # Update the registered maps from the grass spatial database
        statement = ""
        # This dict stores the datasets that must be updated
        dataset_dict = {}

        count = 0
        maps = stds.get_registered_maps_as_objects(dbif=dbif)

        # We collect the delete and update statements
        for map in maps:
            count += 1
            if count % 10 == 0:
                grass.percent(count, len(maps), 1)

            map.select(dbif=dbif)

            # Check if the map is present in the grass spatial database
            # Update if present, delete if not present
            if map.map_exists():
                # Read new metadata from the spatial database
                map.load()
                statement += map.update(dbif=dbif, execute=False)
            else:
                # Delete the map from the temporal database
                # We need to update all effected space time datasets
                datasets = map.get_registered_stds(dbif)
                if datasets:
                    for dataset in datasets:
                        dataset_dict[dataset] = dataset
                # Collect the delete statements
                statement += map.delete(dbif=dbif, update=False,
                                        execute=False)

        # Execute the collected SQL statements
        dbif.execute_transaction(statement)

        # Update the effected space time datasets
        for id in dataset_dict:
            stds_new = stds.get_new_instance(id)
            stds_new.select(dbif=dbif)
            stds_new.update_from_registered_maps(dbif=dbif)

    # Recompute the spatio-temporal extent when maps changed, metadata was
    # modified, or the user explicitly requested it with -u
    if map_update or update or force_update:
        stds.update_from_registered_maps(dbif=dbif)
        stds.update_command_string(dbif=dbif)

    dbif.close()
def main():
    """Print attribute data of all maps registered in a space time vector
    dataset.

    Runs v.db.select for every registered vector map and prints each row
    prefixed with the map's start and end time; the header line is
    re-printed whenever the column set changes.
    """
    # Get the options
    input = options["input"]
    where = options["where"]
    columns = options["columns"]
    tempwhere = options["t_where"]
    layer = options["layer"]
    separator = grass.separator(options["separator"])

    if where == "" or where == " " or where == "\n":
        where = None

    if columns == "" or columns == " " or columns == "\n":
        columns = None

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "stvds")

    rows = sp.get_registered_maps("name,layer,mapset,start_time,end_time",
                                  tempwhere, "start_time", None)

    col_names = ""
    if rows:
        for row in rows:
            vector_name = "%s@%s" % (row["name"], row["mapset"])
            # In case a layer is defined in the vector dataset,
            # we override the option layer
            if row["layer"]:
                layer = row["layer"]

            select = grass.read_command("v.db.select", map=vector_name,
                                        layer=layer, columns=columns,
                                        separator="%s" % (separator),
                                        where=where)

            if not select:
                grass.fatal(
                    _("Unable to run v.db.select for vector map <%s> "
                      "with layer %s") % (vector_name, layer))
            # The first line are the column names
            # (renamed from `list` to avoid shadowing the builtin)
            lines = select.split("\n")
            count = 0
            for entry in lines:
                if entry.strip() != "":
                    # print the column names in case they change
                    # BUGFIX: converted Python-2 print statements to the
                    # print() function used elsewhere in this file
                    if count == 0:
                        col_names_new = "start_time%send_time%s%s" % (
                            separator, separator, entry)
                        if col_names != col_names_new:
                            col_names = col_names_new
                            print(col_names)
                    else:
                        if row["end_time"]:
                            print("%s%s%s%s%s" % (row["start_time"],
                                                  separator, row["end_time"],
                                                  separator, entry))
                        else:
                            print("%s%s%s%s" % (row["start_time"], separator,
                                                separator, entry))
                    count += 1
def main():
    """List space time datasets (or time stamped maps) per mapset.

    Iterates over the requested temporal types, fetches the dataset list
    for every available mapset (current mapset first), and prints one row
    per dataset, optionally preceded by a single column header (-c flag).
    """
    # Get the options
    type = options["type"]
    temporal_type = options["temporaltype"]
    columns = options["columns"]
    order = options["order"]
    where = options["where"]
    separator = gscript.separator(options["separator"])
    colhead = flags['c']

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.dataset_factory(type, None)
    first = True

    if gscript.verbosity() > 0:
        sys.stderr.write("----------------------------------------------\n")

    for ttype in temporal_type.split(","):
        if ttype == "absolute":
            time = "absolute time"
        else:
            time = "relative time"

        stds_list = tgis.get_dataset_list(type, ttype, columns, where, order)

        # Use the correct order of the mapsets, hence first the current mapset, then
        # alphabetic ordering
        mapsets = tgis.get_tgis_c_library_interface().available_mapsets()

        # Print for each mapset separately
        for key in mapsets:
            if key in stds_list.keys():
                rows = stds_list[key]

                if rows:
                    if gscript.verbosity() > 0:
                        if issubclass(sp.__class__, tgis.AbstractMapDataset):
                            sys.stderr.write(_("Time stamped %s maps with %s available in mapset <%s>:\n")%\
                                             (sp.get_type(), time, key))
                        else:
                            sys.stderr.write(_("Space time %s datasets with %s available in mapset <%s>:\n")%\
                                             (sp.get_new_map_instance(None).get_type(), time, key))

                    # Print the column names if requested
                    # BUGFIX: the inner loop used `key` as its variable,
                    # shadowing the mapset name from the enclosing loop;
                    # also converted Python-2 print statements to print()
                    if colhead and first:
                        print(separator.join(str(col_name)
                                             for col_name in rows[0].keys()))
                        first = False

                    for row in rows:
                        print(separator.join(str(col) for col in row))
def test():
    """Manual smoke test for the BitmapProvider rendering pipeline.

    Builds a two-layer animation list (a strds raster layer and a plain
    vector layer), renders it into /tmp/test and saves every produced
    bitmap as a PNG.
    """
    import shutil
    from core.layerlist import LayerList, Layer
    from animation.data import AnimLayer
    from animation.utils import layerListToCmdsMatrix
    import grass.temporal as tgis

    tgis.init()

    # Assemble the layer list: one animated strds layer ...
    layers = LayerList()
    strds_layer = AnimLayer()
    strds_layer.mapType = "strds"
    strds_layer.name = "JR"
    strds_layer.cmd = ["d.rast", "map=elev_2007_1m"]
    layers.AddLayer(strds_layer)

    # ... and one semi-transparent static vector layer
    vect_layer = Layer()
    vect_layer.mapType = "vector"
    vect_layer.name = "buildings_2009_approx"
    vect_layer.cmd = ["d.vect", "map=buildings_2009_approx", "color=grey"]
    vect_layer.opacity = 50
    layers.AddLayer(vect_layer)

    bitmap_pool = BitmapPool()
    map_files_pool = MapFilesPool()

    # Start from an empty working directory
    work_dir = "/tmp/test"
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.mkdir(work_dir)
    # Uncomment to remove the directory automatically when the program ends:
    # cleanUp = CleanUp(work_dir)
    # import atexit
    # atexit.register(cleanUp)

    provider = BitmapProvider(bitmap_pool, map_files_pool, work_dir,
                              imageWidth=640, imageHeight=480)

    # Report progress on stdout
    provider.renderingStarted.connect(
        lambda count: sys.stdout.write("Total number of maps: {c}\n".format(c=count))
    )
    provider.renderingContinues.connect(
        lambda current, text: sys.stdout.write(
            "Current number: {c}\n".format(c=current)
        )
    )
    provider.compositionStarted.connect(
        lambda count: sys.stdout.write(
            "Composition: total number of maps: {c}\n".format(c=count)
        )
    )
    provider.compositionContinues.connect(
        lambda current, text: sys.stdout.write(
            "Composition: Current number: {c}\n".format(c=current)
        )
    )
    provider.mapsLoaded.connect(lambda: sys.stdout.write("Maps loading finished\n"))

    provider.SetCmds(layerListToCmdsMatrix(layers),
                     [layer.opacity for layer in layers])

    app = wx.App()
    provider.Load(bgcolor=(13, 156, 230), nprocs=4)

    # Dump every rendered bitmap as a PNG for visual inspection
    for key in bitmap_pool.keys():
        if key is not None:
            bitmap_pool[key].SaveFile(os.path.join(work_dir, key + ".png"),
                                      wx.BITMAP_TYPE_PNG)
def main():
    """Observe space time raster dataset(s) with a vector map.

    For every sampling granule a new layer is added to a copy of the
    input vector map, one attribute column per sampled strds is created,
    v.what.rast fills the columns, and the resulting time stamped vector
    maps are registered in a new space time vector dataset.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    vector_output = options["vector_output"]
    strds = options["strds"]
    where = options["where"]
    columns = options["columns"]

    if where == "" or where == " " or where == "\n":
        where = None

    overwrite = grass.overwrite()

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")
    column_names = columns.split(",")

    if len(strds_names) != len(column_names):
        grass.fatal(
            _("The number of columns must be equal to the number of space time raster datasets"
              ))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = grass.gisenv()["MAPSET"]

    out_sp = tgis.check_new_stds(output, "stvds", dbif, overwrite)

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)

    # Single space time raster dataset
    if len(strds_names) == 1:
        rows = first_strds.get_registered_maps(
            columns="name,mapset,start_time,end_time",
            order="start_time", dbif=dbif)

        if not rows:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is empty") %
                out_sp.get_id())

        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [
                row["name"] + "@" + row["mapset"],
            ]

            s = Sample(start, end, raster_maps)
            samples.append(s)
    else:
        # Multiple space time raster datasets; all must share the temporal
        # type of the first one
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(
                    _(
                        "Temporal type of space time raster datasets must be equal\n"
                        "<%(a)s> of type %(type_a)s do not match <%(b)s> of type %(type_b)s"
                        % {
                            "a": first_strds.get_id(),
                            "type_a": first_strds.get_temporal_type(),
                            "b": dataset.get_id(),
                            "type_b": dataset.get_temporal_type(),
                        }))

        mapmatrizes = tgis.sample_stds_by_stds_topology(
            "strds",
            "strds",
            strds_names,
            strds_names[0],
            False,
            None,
            "equal",
            False,
            False,
        )

        # Keep only granules for which every strds provides a sample map
        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:
                entry = mapmatrix[i]

                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)

            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]

                start, end = map.get_temporal_extent_as_tuple()
                s = Sample(start, end, mapname_list)
                samples.append(s)

    num_samples = len(samples)

    # Get the layer and database connections of the input vector
    vector_db = grass.vector.vector_db(input)

    # We copy the vector table and create the new layers
    if vector_db:
        # Use the first layer to copy the categories from
        layers = "1,"
    else:
        layers = ""
    first = True
    for layer in range(num_samples):
        layer += 1
        # Skip existing layer
        if vector_db and layer in vector_db and vector_db[layer][
                "layer"] == layer:
            continue
        if first:
            layers += "%i" % (layer)
            first = False
        else:
            layers += ",%i" % (layer)

    vectmap = vector_output

    # We create a new vector map using the categories of the original map
    try:
        grass.run_command(
            "v.category",
            input=input,
            layer=layers,
            output=vectmap,
            option="transfer",
            overwrite=overwrite,
        )
    except CalledModuleError:
        grass.fatal(
            _("Unable to create new layers for vector map <%s>") % (vectmap))

    # BUGFIX: corrected typo "Observaion" in the user-visible title
    title = _("Observation of space time raster dataset(s) <%s>") % (strds)
    description = _("Observation of space time raster dataset(s) <%s>"
                    " with vector map <%s>") % (strds, input)

    # Create the output space time vector dataset
    out_sp = tgis.open_new_stds(
        output,
        "stvds",
        first_strds.get_temporal_type(),
        title,
        description,
        first_strds.get_semantic_type(),
        dbif,
        overwrite,
    )

    dummy = out_sp.get_new_map_instance(None)

    # Sample the space time raster dataset with the vector
    # map at specific layer with v.what.rast
    count = 1
    for sample in samples:
        raster_names = sample.raster_names

        if len(raster_names) != len(column_names):
            grass.fatal(
                _("The number of raster maps in a granule must "
                  "be equal to the number of column names"))

        # Create the columns creation string; CELL maps get an integer
        # column, everything else double precision
        column_defs = []
        for name, column in zip(raster_names, column_names):
            # The column is by default double precision
            coltype = "DOUBLE PRECISION"
            # Get raster map type
            raster_map = tgis.RasterDataset(name)
            raster_map.load()

            if raster_map.metadata.get_datatype() == "CELL":
                coltype = "INT"

            column_defs.append("%s %s" % (column, coltype))
        columns_string = ",".join(column_defs)

        # Try to add a column
        if vector_db and count in vector_db and vector_db[count]["table"]:
            try:
                grass.run_command(
                    "v.db.addcolumn",
                    map=vectmap,
                    layer=count,
                    column=columns_string,
                    overwrite=overwrite,
                )
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to add column %s to vector map <%s> "
                      "with layer %i") % (columns_string, vectmap, count))
        else:
            # Try to add a new table
            grass.message("Add table to layer %i" % (count))
            try:
                grass.run_command(
                    "v.db.addtable",
                    map=vectmap,
                    layer=count,
                    columns=columns_string,
                    overwrite=overwrite,
                )
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to add table to vector map "
                      "<%s> with layer %i") % (vectmap, count))

        # Call v.what.rast for each raster map
        for name, column in zip(raster_names, column_names):
            try:
                grass.run_command(
                    "v.what.rast",
                    map=vectmap,
                    layer=count,
                    raster=name,
                    column=column,
                    where=where,
                )
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to run v.what.rast for vector map <%s> "
                      "with layer %i and raster map <%s>") %
                    (vectmap, count, str(raster_names)))

        vect = out_sp.get_new_map_instance(
            dummy.build_id(vectmap, mapset, str(count)))
        vect.load()

        start = sample.start
        end = sample.end

        if out_sp.is_time_absolute():
            vect.set_absolute_time(start, end)
        else:
            vect.set_relative_time(start, end,
                                   first_strds.get_relative_time_unit())

        if vect.is_in_db(dbif):
            vect.update_all(dbif)
        else:
            vect.insert(dbif)

        out_sp.register_map(vect, dbif)
        count += 1

    out_sp.update_from_registered_maps(dbif)
    dbif.close()
def main():
    """Aggregate a space time raster dataset using the temporal topology
    of a second (sampling) dataset to define the aggregation granules.

    All parameters come from the module-level ``options``/``flags``
    dicts produced by the GRASS parser; the result is written into a
    new output space time raster dataset.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    sampler = options["sample"]
    where = options["where"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]
    type = options["type"]

    # Temporal topology relations used for sampling (comma separated,
    # e.g. "contains,overlaps")
    topo_list = sampling.split(",")

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds", dbif)
    sampler_sp = tgis.open_old_stds(sampler, type, dbif)

    # Input and sampler dataset must share the temporal type
    # (absolute vs. relative time)
    if sampler_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        gcore.fatal(
            _("Input and aggregation dataset must have "
              "the same temporal type"))

    # Check if intervals are present: granules must be proper time
    # intervals, not time instances
    if sampler_sp.temporal_extent.get_map_time() != "interval":
        dbif.close()
        gcore.fatal(
            _("All registered maps of the aggregation dataset "
              "must have time intervals"))

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    map_list = sp.get_registered_maps_as_objects(where=where,
                                                 order="start_time",
                                                 dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)

    # The sampler's registered maps define the aggregation granules
    granularity_list = sampler_sp.get_registered_maps_as_objects(
        where=where, order="start_time", dbif=dbif)

    if not granularity_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % sampler)

    gran = sampler_sp.get_granularity()

    output_list = tgis.aggregate_by_topology(
        granularity_list=granularity_list,
        granularity=gran,
        map_list=map_list,
        topo_list=topo_list,
        basename=base,
        time_suffix=time_suffix,
        offset=offset,
        method=method,
        nprocs=nprocs,
        spatial=None,
        overwrite=gcore.overwrite(),
    )

    if output_list:
        # The new dataset inherits the metadata of the input dataset
        temporal_type, semantic_type, title, description = sp.get_initial_values(
        )
        output_strds = tgis.open_new_stds(
            output,
            "strds",
            temporal_type,
            title,
            description,
            semantic_type,
            dbif,
            gcore.overwrite(),
        )
        tgis.register_map_object_list(
            "rast",
            output_list,
            output_strds,
            register_null,
            sp.get_relative_time_unit(),
            dbif,
        )

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
def main(): # lazy imports import grass.temporal as tgis # Get the options input = options["input"] color = options["color"] raster = options["raster"] volume = options["raster_3d"] rules = options["rules"] remove = flags["r"] write = flags["w"] list = flags["l"] invert = flags["n"] log = flags["g"] abslog = flags["a"] equi = flags["e"] if raster == "": raster=None if volume == "": volume = None if rules == "": rules = None if color == "": color = None # Make sure the temporal database exists tgis.init() sp = tgis.open_old_stds(input, "strds") rows = sp.get_registered_maps("id", None, None, None) if rows: # Create the r.colors input file filename = grass.tempfile(True) file = open(filename, 'w') for row in rows: string = "%s\n" % (row["id"]) file.write(string) file.close() flags_="" if(remove): flags_+="r" if(write): flags_+="w" if(list): flags_+="l" if(invert): flags_+="n" if(log): flags_+="g" if(abslog): flags_+="a" if(equi): flags_+="e" try: grass.run_command("r.colors", flags=flags_, file=filename, color=color, raster=raster, volume=volume, rules=rules, overwrite=grass.overwrite()) except CalledModuleError: grass.fatal(_("Error in r.colors call"))
def main(): # lazy imports import grass.temporal as tgis name = options["input"] type_ = options["type"] shellstyle = flags['g'] system = flags['d'] history = flags['h'] # Make sure the temporal database exists tgis.init() dbif, connection_state_changed = tgis.init_dbif(None) rows = tgis.get_tgis_metadata(dbif) if system and not shellstyle and not history: # 0123456789012345678901234567890 print( " +------------------- Temporal DBMI backend information ----------------------+" ) print(" | DBMI Python interface:...... " + str(dbif.get_dbmi().__name__)) print(" | Temporal database string:... " + str(tgis.get_tgis_database_string())) print(" | SQL template path:.......... " + str(tgis.get_sql_template_path())) if rows: for row in rows: print(" | %s .......... %s" % (row[0], row[1])) print( " +----------------------------------------------------------------------------+" ) return elif system and not history: print("dbmi_python_interface=\'" + str(dbif.get_dbmi().__name__) + "\'") print("dbmi_string=\'" + str(tgis.get_tgis_database_string()) + "\'") print("sql_template_path=\'" + str(tgis.get_sql_template_path()) + "\'") if rows: for row in rows: print("%s=\'%s\'" % (row[0], row[1])) return if not system and not name: grass.fatal(_("Please specify %s=") % ("name")) if name.find("@") >= 0: id_ = name else: id_ = name + "@" + grass.gisenv()["MAPSET"] dataset = tgis.dataset_factory(type_, id_) if not dataset.is_in_db(dbif): grass.fatal(_("Dataset <%s> not found in temporal database") % (id_)) dataset.select(dbif) if history and type_ in ["strds", "stvds", "str3ds"]: dataset.print_history() return if shellstyle: dataset.print_shell_info() else: dataset.print_info()
def setUpClass(cls): """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() cls.runModule("g.region", n=30.0, s=0.0, e=30.0, w=0.0, t=1.0, b=0.0, res=10.0) cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a1 = 1") cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="b1 = 1") cls.runModule("g.region", n=50.0, s=20.0, e=50.0, w=20.0, t=1.0, b=0.0, res=10.0) cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a2 = 2") cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="b2 = 2") cls.runModule("g.region", n=40.0, s=25.0, e=40.0, w=25.0, t=1.0, b=0.0, res=10.0) cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a3 = 3") cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="b3 = 3") cls.runModule("g.region", n=60.0, s=40.0, e=60.0, w=40.0, t=1.0, b=0.0, res=10.0) cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a4 = 4") cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="b4 = 4") cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="singletmap = 100") cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="singlemap = 1000") tgis.open_new_stds( name="A", type="strds", temporaltype="absolute", title="A", descr="A", semantic="field", overwrite=True, ) tgis.open_new_stds( name="B", type="strds", temporaltype="absolute", title="B", descr="B", semantic="field", overwrite=True, ) tgis.register_maps_in_space_time_dataset( type="raster", name="A", maps="a1,a2,a3,a4", start="2001-01-01", interval=False, ) tgis.register_maps_in_space_time_dataset( type="raster", name="B", maps="b1,b2,b3,b4", start="2001-01-01", interval=False, ) tgis.register_maps_in_space_time_dataset(type="raster", name=None, maps="singletmap", start="2001-01-01")
def main():
    """Fill temporal gaps in a space time raster dataset by interpolating
    between the maps that precede and follow each gap.

    Options come from the module-level ``options`` dict: input STRDS,
    basename for the interpolated maps, an optional WHERE clause and the
    number of parallel interpolation processes.
    """
    # Get the options
    input = options["input"]
    base = options["basename"]
    where = options["where"]
    # BUGFIX: the GRASS parser delivers option values as strings; without
    # the int() conversion the batching test `proc_count == nprocs` below
    # could never be true (int compared to str)
    nprocs = int(options["nprocs"])

    mapset = grass.gisenv()["MAPSET"]

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds")

    # Entries with id == None represent gaps in the temporal sequence
    maps = sp.get_registered_maps_as_objects_with_gaps(where, dbif)

    num = len(maps)

    gap_list = []
    overwrite_flags = {}

    # Identify all gaps and assign new map names to them
    count = 0
    for _map in maps:
        if _map.get_id() is None:
            count += 1
            _id = "%s_%d@%s" % (base, num + count, mapset)
            _map.set_id(_id)
            overwrite_flags[_id] = False
            if _map.map_exists() or _map.is_in_db(dbif):
                # BUGFIX: the overwrite state must be queried by CALLING
                # grass.overwrite(); the bare function object is always
                # truthy, so this name-collision check never triggered
                if not grass.overwrite():
                    grass.fatal(
                        _("Map with name <%s> already exists. "
                          "Please use another base name.") % _id)
                elif _map.is_in_db(dbif):
                    overwrite_flags[_id] = True
            gap_list.append(_map)

    if len(gap_list) == 0:
        grass.message(_("No gaps found"))
        return

    # Build the temporal topology
    tb = tgis.SpatioTemporalTopologyBuilder()
    tb.build(maps)

    # Do some checks before computation
    for _map in gap_list:
        if not _map.get_precedes() or not _map.get_follows():
            grass.fatal(
                _("Unable to determine successor "
                  "and predecessor of a gap."))

        if len(_map.get_precedes()) > 1:
            grass.warning(
                _("More than one successor of the gap found. "
                  "Using the first found."))

        if len(_map.get_follows()) > 1:
            grass.warning(
                _("More than one predecessor of the gap found. "
                  "Using the first found."))

    # Interpolate the maps using parallel processing
    proc_list = []
    proc_count = 0
    started = 0
    num = len(gap_list)

    for _map in gap_list:
        predecessor = _map.get_follows()[0]
        successor = _map.get_precedes()[0]

        # Build the module inputs strings: interpolate at position 0.5
        # between the two neighbouring maps
        inputs = "%s,%s" % (predecessor.get_map_id(), successor.get_map_id())
        dpos = "0,1"
        output = "%s" % (_map.get_name())
        outpos = "0.5"

        # Start several processes in parallel
        proc_list.append(
            Process(target=run_interp, args=(inputs, dpos, output, outpos)))
        proc_list[proc_count].start()
        proc_count += 1
        started += 1

        # BUGFIX: join the final, possibly partial batch as well; the old
        # condition (proc_count == num) missed it whenever num was larger
        # than nprocs but not a multiple of it, leaking unjoined processes
        # and silently ignoring their exit codes
        if proc_count == nprocs or started == num:
            proc_count = 0
            exitcodes = 0
            for proc in proc_list:
                proc.join()
                exitcodes += proc.exitcode

            if exitcodes != 0:
                dbif.close()
                grass.fatal(_("Error while interpolation computation"))

            # Empty process list
            proc_list = []

    # Insert new interpolated maps in temporal database and dataset
    for _map in gap_list:
        id = _map.get_id()
        if overwrite_flags[id]:
            # Replace the stale database entry but keep its time stamp
            if _map.is_time_absolute():
                start, end = _map.get_absolute_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(id)
                _map.set_absolute_time(start, end)
            else:
                start, end, unit = _map.get_relative_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(id)
                _map.set_relative_time(start, end, unit)
        _map.load()
        _map.insert(dbif)
        sp.register_map(_map, dbif)

    sp.update_from_registered_maps(dbif)
    sp.update_command_string(dbif=dbif)
    dbif.close()
def __init__(self, parent, giface, title=_("Animation Tool"),
             rasters=None, timeseries=None):
    """Main frame of the wxGUI Animation Tool.

    :param parent: parent window
    :param giface: GRASS interface object
    :param title: frame title
    :param rasters: not used in this constructor; presumably kept for
                    API compatibility -- TODO confirm
    :param timeseries: not used in this constructor; presumably kept for
                       API compatibility -- TODO confirm
    """
    wx.Frame.__init__(self, parent, title=title,
                      style=wx.DEFAULT_FRAME_STYLE, size=(800, 600))
    self._giface = giface
    self.SetClientSize(self.GetSize())
    self.iconsize = (16, 16)

    self.SetIcon(
        wx.Icon(os.path.join(globalvar.ICONDIR, "grass_map.ico"),
                wx.BITMAP_TYPE_ICO))

    # Make sure the temporal database exists
    try:
        tgis.init()
    except FatalError as e:
        GWarning(parent=self, message=str(e))

    # create temporal directory and ensure it's deleted after programs ends
    # (stored in MAPSET/.tmp/)
    global TMP_DIR
    TMP_DIR = gcore.tempdir()

    # One Animation slot per map window; MAX_COUNT limits their number
    self.animations = [Animation() for i in range(MAX_COUNT)]
    self.windows = []
    self.animationPanel = AnimationsPanel(self,
                                          self.windows,
                                          initialCount=MAX_COUNT)
    # Pools shared by all animations so identical bitmaps/rendered map
    # files are produced only once
    bitmapPool = BitmapPool()
    mapFilesPool = MapFilesPool()

    self._progressDlg = None
    self._progressDlgMax = None

    self.provider = BitmapProvider(bitmapPool=bitmapPool,
                                   mapFilesPool=mapFilesPool,
                                   tempDir=TMP_DIR)
    # Separate sliders for temporal and non-temporal animations
    self.animationSliders = {}
    self.animationSliders["nontemporal"] = SimpleAnimationSlider(self)
    self.animationSliders["temporal"] = TimeAnimationSlider(self)
    self.controller = AnimationController(
        frame=self,
        sliders=self.animationSliders,
        animations=self.animations,
        mapwindows=self.windows,
        provider=self.provider,
        bitmapPool=bitmapPool,
        mapFilesPool=mapFilesPool,
    )
    for win in self.windows:
        win.Bind(wx.EVT_SIZE, self.FrameSizeChanged)

    # Wire rendering/composition progress signals to the progress dialog
    # and status bar
    self.provider.mapsLoaded.connect(lambda: self.SetStatusText(""))
    self.provider.renderingStarted.connect(self._showRenderingProgress)
    self.provider.renderingContinues.connect(self._updateProgress)
    self.provider.renderingFinished.connect(self._closeProgress)
    self.provider.compositionStarted.connect(
        self._showRenderingProgress)
    self.provider.compositionContinues.connect(self._updateProgress)
    self.provider.compositionFinished.connect(self._closeProgress)

    self.InitStatusbar()

    self._mgr = wx.aui.AuiManager(self)

    # toolbars
    self.toolbars = {}
    self._addToolbars()
    self._addPanes()
    self._mgr.Update()

    self.dialogs = dict()
    self.dialogs["speed"] = None
    self.dialogs["preferences"] = None

    self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
def main():
    """Compute statistics (via v.rast.stats) of the raster maps of one
    or more space time raster datasets for the areas of a vector map and
    store them in new attribute columns of the output vector map.
    """
    # Get the options
    input = options["input"]
    output = options["output"]
    strds = options["strds"]
    tempwhere = options["t_where"]
    where = options["where"]
    methods = options["method"]
    percentile = options["percentile"]

    overwrite = grass.overwrite()

    quiet = True
    if grass.verbosity() > 2:
        quiet = False

    if where == "" or where == " " or where == "\n":
        where = None

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)
    # Single space time raster dataset
    if len(strds_names) == 1:
        granu = first_strds.get_granularity()

        rows = first_strds.get_registered_maps(
            "name,mapset,start_time,end_time", tempwhere, "start_time", dbif)

        if not rows:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is empty") %
                first_strds.get_id())

        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [
                row["name"] + "@" + row["mapset"],
            ]

            s = Sample(start, end, raster_maps, first_strds.get_name(), granu)
            samples.append(s)
    else:
        # Multiple space time raster datasets: all must share the
        # temporal type of the first one
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(
                    _(
                        "Temporal type of space time raster "
                        "datasets must be equal\n<%(a)s> of type "
                        "%(type_a)s do not match <%(b)s> of type "
                        "%(type_b)s" % {
                            "a": first_strds.get_id(),
                            "type_a": first_strds.get_temporal_type(),
                            "b": dataset.get_id(),
                            "type_b": dataset.get_temporal_type(),
                        }))

        mapmatrizes = tgis.sample_stds_by_stds_topology(
            "strds",
            "strds",
            strds_names,
            strds_names[0],
            False,
            None,
            "equal",
            False,
            False,
        )
        # TODO check granularity for multiple STRDS
        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:
                entry = mapmatrix[i]
                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)
            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]
                start, end = map.get_temporal_extent_as_tuple()
                # NOTE(review): 'name' here is whatever the inner loop
                # left behind (the last sampled map id), not an STRDS
                # name -- looks suspicious; confirm against the Sample
                # class before relying on the resulting column names
                s = Sample(start, end, mapname_list, name)
                samples.append(s)

    # Get the layer and database connections of the input vector
    if where:
        try:
            grass.run_command("v.extract",
                              input=input,
                              where=where,
                              output=output)
        except CalledModuleError:
            dbif.close()
            grass.fatal(
                _("Unable to run v.extract for vector map"
                  " <%s> and where <%s>") % (input, where))
    else:
        gcopy(input, output, "vector")

    msgr = Messenger()
    perc_curr = 0
    perc_tot = len(samples)

    pymap = Vector(output)
    # NOTE(review): the bare 'except:' also swallows SystemExit and
    # KeyboardInterrupt; kept unchanged here to preserve behavior
    try:
        pymap.open("r")
    except:
        dbif.close()
        grass.fatal(_("Unable to create vector map <%s>" % output))
    pymap.close()

    for sample in samples:
        raster_names = sample.raster_names
        # Call v.what.rast for each raster map
        for name in raster_names:
            day = sample.printDay()
            # One new column per raster map: <strds name>_<day>
            column_name = "%s_%s" % (sample.strds_name, day)
            try:
                grass.run_command(
                    "v.rast.stats",
                    map=output,
                    raster=name,
                    column=column_name,
                    method=methods,
                    percentile=percentile,
                    quiet=quiet,
                    overwrite=overwrite,
                )
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to run v.what.rast for vector map"
                      " <%s> and raster map <%s>") % (output, name))
        msgr.percent(perc_curr, perc_tot, 1)
        perc_curr += 1

    dbif.close()
def main(): options, flags = gscript.parser() # import wx only after running parser # to avoid issues when only interface is needed import grass.temporal as tgis import wx from grass.script.setup import set_gui_path set_gui_path() from core.giface import StandaloneGrassInterface from core.layerlist import LayerList from animation.frame import AnimationFrame, MAX_COUNT from animation.data import AnimLayer rast = options["raster"] vect = options["vector"] strds = options["strds"] stvds = options["stvds"] numInputs = 0 if rast: numInputs += 1 if vect: numInputs += 1 if strds: numInputs += 1 if stvds: numInputs += 1 if numInputs > 1: gscript.fatal( _("%s=, %s=, %s= and %s= are mutually exclusive.") % ("raster", "vector", "strds", "stvds")) if numInputs > 0: # We need to initialize the temporal framework in case # a space time dataset was set on the command line so that # the AnimLayer() class works correctly try: tgis.init() except FatalError as e: print(e) layerList = LayerList() if rast: layer = AnimLayer() layer.mapType = "raster" layer.name = rast layer.cmd = ["d.rast", "map={name}".format(name=rast.split(",")[0])] layerList.AddLayer(layer) if vect: layer = AnimLayer() layer.mapType = "vector" layer.name = vect layer.cmd = ["d.vect", "map={name}".format(name=vect.split(",")[0])] layerList.AddLayer(layer) if strds: layer = AnimLayer() layer.mapType = "strds" layer.name = strds layer.cmd = ["d.rast", "map="] layerList.AddLayer(layer) if stvds: layer = AnimLayer() layer.mapType = "stvds" layer.name = stvds layer.cmd = ["d.vect", "map="] layerList.AddLayer(layer) app = wx.App() frame = AnimationFrame( parent=None, giface=StandaloneGrassInterface(), title=_("Animation Tool - GRASS GIS"), ) frame.CentreOnScreen() frame.Show() if len(layerList) >= 1: # CallAfter added since it was crashing with wxPython 3 gtk wx.CallAfter(frame.SetAnimations, [layerList] + [None] * (MAX_COUNT - 1)) app.MainLoop()
def main():
    """Sample space time raster dataset(s) at the position and timestamp
    of vector points and write the sampled values into attribute columns
    of the input vector map (one column per STRDS).
    """
    # Get the options
    input = options["input"]
    timestamp_column = options["timestamp_column"]
    columns = options["column"]
    layer = options["layer"]
    where = options["where"]
    strds = options["strds"]
    tempwhere = options["t_where"]
    i_flag = flags["i"]

    if where == "" or where == " " or where == "\n":
        where = None

    # overwrite = grass.overwrite()

    # Set verbosity level
    # quiet = True
    # if grass.verbosity() > 2:
    #     quiet = False

    # Check DB connection for input vector map
    dbcon = grass.vector_layer_db(input, layer)
    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")
    column_names = columns.split(",")
    if not len(column_names) == len(strds_names):
        grass.fatal(_('Number of columns and number of STRDS does not match.'))

    # Check type of timestamp column
    cols = grass.vector_columns(input, layer=layer)
    if timestamp_column not in cols.keys():
        grass.fatal(_('Could not find column {} '
                      'in table connected to vector map {} '
                      'at layer {}'.format(timestamp_column, input, layer)))

    if cols[timestamp_column]['type'] != 'DATE':
        if dbcon['driver'] != 'sqlite':
            # Note that SQLite does not have a DATE datatype and
            # and an index does not significantly speedup the process
            # (at least not with a couple of 100 points)
            grass.warning(_('Timestamp column is of type {}. '
                            'It is recommended to use DATE type with an '
                            'index.'.format(cols[timestamp_column]['type'])))

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    # BUGFIX: the temporal extent of the points is always needed below to
    # build the per-STRDS temporal filter; the old code computed it only
    # when no t_where was given, raising a NameError otherwise
    extent = []
    for stat in ('min', 'max'):
        tsql = "SELECT {}({}) FROM {}".format(stat, timestamp_column,
                                              dbcon['table'])
        extent.append(grass.read_command('db.select', flags='c', sql=tsql))

    grass.verbose(_('Temporal extent of vector points map is '
                    '{} to {}'.format(extent[0], extent[1])))

    if tempwhere:
        # Prefix for every per-STRDS condition built below
        tempwhere = '({}) AND '.format(tempwhere)
    else:
        tempwhere = ''

    # Loop over STRDS
    counter = 0
    for strds_name in strds_names:

        cur_strds = tgis.open_old_stds(strds_name, "strds", dbif)

        # skip current STRDS if no map is registered in it
        if cur_strds.metadata.get_number_of_maps() is None:
            grass.warning(_(
                'Space time raster dataset {} does not contain any registered '
                'map. It is being skipped.'.format(cur_strds.get_id())))
            counter += 1
            continue

        granu = cur_strds.get_granularity()
        start_time = tgis.datetime_math.check_datetime_string(extent[0])
        start_gran = tgis.datetime_math.adjust_datetime_to_granularity(
            start_time, granu).isoformat()
        # BUGFIX: build the temporal filter in a local variable instead of
        # appending to tempwhere, which accumulated one condition per loop
        # iteration and produced invalid SQL for multiple STRDS
        map_where = tempwhere + "(end_time > '{}' and start_time <= '{}')".format(
            start_gran, extent[1])

        # Get info on registered maps in STRDS
        rows = cur_strds.get_registered_maps("name,mapset,start_time,end_time",
                                             map_where, "start_time", dbif)

        # Check temporal type and
        # define sampling function to use
        # becomes relevant when temporal type relative gets implemented
        if cur_strds.is_time_relative():
            grass.fatal(_('Sorry, STRDS of relative temporal type is not (yet) supported'))
            sample = sample_relative
        else:
            sample = sample_absolute

        # Check if there are raster maps to sample from that fullfill
        # temporal conditions
        if not rows and map_where:
            dbif.close()
            grass.fatal(_("No maps selected from Space time raster dataset "
                          "{}".format(cur_strds.get_id())))

        # Include temporal condition into where clause
        where_clause = '({}) AND '.format(where) if where else ''

        # Loop over registered maps in STRDS
        row_number = 0
        for row in rows:
            # If r.what had a where option, r.what could be used to
            # collect raster values (without interpolation)
            # in a ParallelModuleQueue to collect values using multiple
            # cores and then upload results in one operation
            sample(input, layer, timestamp_column, column_names[counter], row,
                   where_clause, i_flag)

            row_number += 1
            grass.percent(row_number, len(rows), 3)
        counter = counter + 1

    dbif.close()
    grass.vector_history(input)
def main():
    """Temporally aggregate a space time raster dataset with a fixed,
    user-given granularity.

    Builds the list of aggregation granules from the start time of the
    first map and the granularity, then delegates the actual work to
    tgis.aggregate_by_topology().
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    gran = options["granularity"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    file_limit = options["file_limit"]
    time_suffix = options["suffix"]

    # Temporal topology relations used for sampling
    topo_list = sampling.split(",")

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds", dbif)

    map_list = sp.get_registered_maps_as_objects(where=where,
                                                 order="start_time",
                                                 dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    start_time = map_list[0].temporal_extent.get_start_time()

    if sp.is_time_absolute():
        # Snap the first granule boundary to the granularity
        # (e.g. "1 day" starts at midnight)
        start_time = tgis.adjust_datetime_to_granularity(start_time, gran)

    # We use the end time first
    end_time = map_list[-1].temporal_extent.get_end_time()
    has_end_time = True

    # In case no end time is available, then we use the start time of the last map layer
    if end_time is None:
        end_time = map_list[-1].temporal_extent.get_start_time()
        has_end_time = False

    granularity_list = []

    # Build the granularity list: consecutive granules of width `gran`
    # covering [start_time, end_time]
    while True:
        if has_end_time is True:
            if start_time >= end_time:
                break
        else:
            if start_time > end_time:
                break

        granule = tgis.RasterDataset(None)
        start = start_time
        if sp.is_time_absolute():
            end = tgis.increment_datetime_by_string(start_time, gran)
            granule.set_absolute_time(start, end)
        else:
            end = start_time + int(gran)
            granule.set_relative_time(start, end, sp.get_relative_time_unit())
        start_time = end

        granularity_list.append(granule)

    output_list = tgis.aggregate_by_topology(granularity_list=granularity_list,
                                             granularity=gran,
                                             map_list=map_list,
                                             topo_list=topo_list,
                                             basename=base,
                                             time_suffix=time_suffix,
                                             offset=offset,
                                             method=method,
                                             nprocs=nprocs,
                                             spatial=None,
                                             overwrite=gcore.overwrite(),
                                             file_limit=file_limit)

    if output_list:
        temporal_type, semantic_type, title, description = sp.get_initial_values(
        )
        output_strds = tgis.open_new_stds(output, "strds", temporal_type,
                                          title, description, semantic_type,
                                          dbif, gcore.overwrite())
        # The -n flag is inverted before being passed on; presumably the
        # UI flag and the register function parameter have opposite
        # meanings -- TODO confirm against the module's flag definition
        if register_null:
            register_null = False
        else:
            register_null = True

        tgis.register_map_object_list("rast", output_list, output_strds,
                                      register_null,
                                      sp.get_relative_time_unit(), dbif)

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
def setUpClass(cls):
    """Initiate the temporal GIS and set the region"""
    tgis.init(True)  # Raise on error instead of exit(1)
    cls.use_temp_region()
    cls.runModule("g.region", n=80.0, s=0.0, e=120.0, w=0.0, t=1.0, b=0.0, res=10.0)

    # Raster maps with distinct constant values used as dataset content
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a1 = 1")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a2 = 2")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a3 = 3")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="a4 = 4")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="b1 = 5")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="b2 = 6")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="c1 = 7")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="d1 = 8")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="d2 = 9")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="d3 = 10")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="e1 = 11")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="e2 = 12")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="e3 = 13")
    cls.runModule("r.mapcalc", overwrite=True, quiet=True, expression="singletmap = 99")

    # Five absolute-time space time raster datasets with different
    # temporal layouts (registered below)
    tgis.open_new_stds(
        name="A",
        type="strds",
        temporaltype="absolute",
        title="A",
        descr="A",
        semantic="field",
        overwrite=True,
    )
    tgis.open_new_stds(
        name="B",
        type="strds",
        temporaltype="absolute",
        title="B",
        descr="B",
        semantic="field",
        overwrite=True,
    )
    tgis.open_new_stds(
        name="C",
        type="strds",
        temporaltype="absolute",
        title="C",
        descr="C",
        semantic="field",
        overwrite=True,
    )
    tgis.open_new_stds(
        name="D",
        type="strds",
        temporaltype="absolute",
        title="D",
        descr="D",
        semantic="field",
        overwrite=True,
    )
    tgis.open_new_stds(
        name="E",
        type="strds",
        temporaltype="absolute",
        title="E",
        descr="E",
        semantic="field",
        overwrite=True,
    )

    # A: four daily interval maps starting 2001-01-01
    tgis.register_maps_in_space_time_dataset(
        type="raster",
        name="A",
        maps="a1,a2,a3,a4",
        start="2001-01-01",
        increment="1 day",
        interval=True,
    )
    # B: two 2-day interval maps starting 2001-01-01
    tgis.register_maps_in_space_time_dataset(
        type="raster",
        name="B",
        maps="b1,b2",
        start="2001-01-01",
        increment="2 day",
        interval=True,
    )
    # C: a single 2-day interval map starting 2001-01-02
    tgis.register_maps_in_space_time_dataset(
        type="raster",
        name="C",
        maps="c1",
        start="2001-01-02",
        increment="2 day",
        interval=True,
    )
    # D: three daily interval maps starting 2001-01-03
    tgis.register_maps_in_space_time_dataset(
        type="raster",
        name="D",
        maps="d1,d2,d3",
        start="2001-01-03",
        increment="1 day",
        interval=True,
    )
    # E: three 2-day interval maps starting 2000-12-31
    tgis.register_maps_in_space_time_dataset(
        type="raster",
        name="E",
        maps="e1,e2,e3",
        start="2000-12-31",
        increment="2 day",
        interval=True,
    )
    # Stand-alone map with an explicit interval, not part of any dataset
    tgis.register_maps_in_space_time_dataset(
        type="raster",
        name=None,
        maps="singletmap",
        start="2001-01-03",
        end="2001-01-04",
    )
def main():
    """Sample space time raster dataset(s) at the locations of a vector
    point map and store the sampled values in attribute columns, one
    column per raster map and granule.
    """
    # lazy imports
    import grass.temporal as tgis
    from grass.pygrass.utils import copy as gcopy
    from grass.pygrass.messages import Messenger
    from grass.pygrass.vector import Vector

    # Get the options
    input = options["input"]
    output = options["output"]
    strds = options["strds"]
    where = options["where"]
    tempwhere = options["t_where"]

    # Either sample into a copy ('output') or update the input map in
    # place ('u' flag) -- the two are mutually exclusive
    if output and flags['u']:
        grass.fatal(_("Cannot combine 'output' option and 'u' flag"))
    elif not output and not flags['u']:
        grass.fatal(_("'output' option or 'u' flag must be given"))
    elif not output and flags['u']:
        grass.warning(
            _("Attribute table of vector {name} will be updated...").format(
                name=input))

    if where == "" or where == " " or where == "\n":
        where = None

    overwrite = grass.overwrite()

    quiet = True
    if grass.verbosity() > 2:
        quiet = False

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)
    # Single space time raster dataset
    if len(strds_names) == 1:
        granu = first_strds.get_granularity()

        rows = first_strds.get_registered_maps(
            "name,mapset,start_time,end_time", tempwhere, "start_time", dbif)

        if not rows:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is empty") %
                first_strds.get_id())

        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [
                row["name"] + "@" + row["mapset"],
            ]

            s = Sample(start, end, raster_maps, first_strds.get_name(), granu)
            samples.append(s)
    else:
        # Multiple space time raster datasets: all must share the
        # temporal type of the first one
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(
                    _("Temporal type of space time raster "
                      "datasets must be equal\n<%(a)s> of type "
                      "%(type_a)s do not match <%(b)s> of type "
                      "%(type_b)s" % {
                          "a": first_strds.get_id(),
                          "type_a": first_strds.get_temporal_type(),
                          "b": dataset.get_id(),
                          "type_b": dataset.get_temporal_type()
                      }))

        mapmatrizes = tgis.sample_stds_by_stds_topology(
            "strds", "strds", strds_names, strds_names[0], False, None,
            "equal", False, False)
        # TODO check granularity for multiple STRDS
        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:
                entry = mapmatrix[i]
                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)
            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]
                start, end = map.get_temporal_extent_as_tuple()
                # NOTE(review): 'name' is the value left over from the
                # inner loop (last sampled map id), not an STRDS name --
                # preserved as-is; confirm against the Sample class
                s = Sample(start, end, mapname_list, name)
                samples.append(s)

    # Work on a copy of the input vector, or in place when updating
    if output:
        gcopy(input, output, 'vector')
    else:
        output = input

    msgr = Messenger()
    perc_curr = 0
    perc_tot = len(samples)

    pymap = Vector(output)
    # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
    # are no longer swallowed
    try:
        pymap.open('r')
    except Exception:
        dbif.close()
        grass.fatal(_("Unable to create vector map <%s>" % output))
    if len(pymap.dblinks) == 0:
        try:
            pymap.close()
            grass.run_command("v.db.addtable", map=output)
        except CalledModuleError:
            dbif.close()
            # BUGFIX: the old message had two %s placeholders but only one
            # value, raising a TypeError instead of reporting the failure
            grass.fatal(_("Unable to add table to vector map <%s>") % output)
    if pymap.is_open():
        pymap.close()

    for sample in samples:
        raster_names = sample.raster_names
        # Call v.what.rast for each raster map
        for name in raster_names:
            # The column is by default double precision
            coltype = "DOUBLE PRECISION"
            # Get raster map type
            raster_map = tgis.RasterDataset(name)
            raster_map.load()
            if raster_map.metadata.get_datatype() == "CELL":
                coltype = "INT"

            day = sample.printDay()
            column_name = "%s_%s" % (sample.strds_name, day)
            # BUGFIX: sanitize dots in the column name itself; str.replace
            # returns a new string and the old code discarded the result,
            # and sanitizing only the DDL string would have made
            # v.db.addcolumn and v.what.rast disagree on the column name
            column_name = column_name.replace('.', '_')
            column_string = "%s %s" % (column_name, coltype)
            try:
                grass.run_command("v.db.addcolumn",
                                  map=output,
                                  column=column_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to add column %s to vector map "
                      "<%s> ") % (column_string, output))
            try:
                grass.run_command("v.what.rast",
                                  map=output,
                                  raster=name,
                                  column=column_name,
                                  where=where,
                                  quiet=quiet)
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to run v.what.rast for vector map"
                      " <%s> and raster map <%s>") % (output,
                                                      str(raster_names)))
        msgr.percent(perc_curr, perc_tot, 1)
        perc_curr += 1

    dbif.close()
def main():
    """Drive daily r.sun runs over a day range, in parallel, and optionally
    cumulate, timestamp and register the per-day outputs.

    Reads all inputs from the module's parsed options/flags; temporary map
    names are tracked in the module-level REMOVE/MREMOVE lists (presumably
    cleaned up by an atexit handler defined elsewhere -- not visible here).
    """
    options, flags = grass.parser()

    # required
    elevation_input = options['elevation']
    aspect_input = options['aspect']
    slope_input = options['slope']

    # optional
    latitude = options['lat']
    longitude = options['long']
    linke_input = options['linke']
    linke_value = options['linke_value']
    albedo_input = options['albedo']
    albedo_value = options['albedo_value']
    horizon_basename = options['horizon_basename']
    horizon_step = options['horizon_step']

    # outputs
    beam_rad = options['beam_rad']
    diff_rad = options['diff_rad']
    refl_rad = options['refl_rad']
    glob_rad = options['glob_rad']
    insol_time = options['insol_time']

    # The *_basename_user copies remember whether the user asked for the
    # daily maps; *_basename itself may later be replaced by a temporary name.
    beam_rad_basename = beam_rad_basename_user = options['beam_rad_basename']
    diff_rad_basename = diff_rad_basename_user = options['diff_rad_basename']
    refl_rad_basename = refl_rad_basename_user = options['refl_rad_basename']
    glob_rad_basename = glob_rad_basename_user = options['glob_rad_basename']
    insol_time_basename = insol_time_basename_user = options[
        'insol_time_basename']

    # missing output?
    if not any([
            beam_rad, diff_rad, refl_rad, glob_rad, insol_time,
            beam_rad_basename, diff_rad_basename, refl_rad_basename,
            glob_rad_basename, insol_time_basename
    ]):
        grass.fatal(_("No output specified."))

    start_day = int(options['start_day'])
    end_day = int(options['end_day'])
    day_step = int(options['day_step'])

    # Cumulative (sum) outputs only make sense when every day is computed.
    if day_step > 1 and (beam_rad or diff_rad or refl_rad or glob_rad
                         or insol_time):
        grass.fatal(
            _("Day step higher then 1 would produce"
              " meaningless cumulative maps."))

    # check: start < end
    if start_day > end_day:
        grass.fatal(_("Start day is after end day."))
    if day_step >= end_day - start_day:
        grass.fatal(_("Day step is too big."))

    step = float(options['step'])
    nprocs = int(options['nprocs'])

    solar_constant = float(
        options['solar_constant']) if options['solar_constant'] else None
    if solar_constant:
        # check it's newer version of r.sun
        if not module_has_parameter('r.sun', 'solar_constant'):
            grass.warning(
                _("This version of r.sun lacks solar_constant option, "
                  "it will be ignored. Use newer version of r.sun."))
            solar_constant = None

    # If a cumulative output was requested without a basename, generate a
    # temporary basename for the intermediate daily maps and schedule removal.
    if beam_rad and not beam_rad_basename:
        beam_rad_basename = create_tmp_map_name('beam_rad')
        MREMOVE.append(beam_rad_basename)
    if diff_rad and not diff_rad_basename:
        diff_rad_basename = create_tmp_map_name('diff_rad')
        MREMOVE.append(diff_rad_basename)
    if refl_rad and not refl_rad_basename:
        refl_rad_basename = create_tmp_map_name('refl_rad')
        MREMOVE.append(refl_rad_basename)
    if glob_rad and not glob_rad_basename:
        glob_rad_basename = create_tmp_map_name('glob_rad')
        MREMOVE.append(glob_rad_basename)
    if insol_time and not insol_time_basename:
        insol_time_basename = create_tmp_map_name('insol_time')
        MREMOVE.append(insol_time_basename)

    # check for existing identical map names
    if not grass.overwrite():
        check_daily_map_names(beam_rad_basename, grass.gisenv()['MAPSET'],
                              start_day, end_day, day_step)
        check_daily_map_names(diff_rad_basename, grass.gisenv()['MAPSET'],
                              start_day, end_day, day_step)
        check_daily_map_names(refl_rad_basename, grass.gisenv()['MAPSET'],
                              start_day, end_day, day_step)
        check_daily_map_names(glob_rad_basename, grass.gisenv()['MAPSET'],
                              start_day, end_day, day_step)
        check_daily_map_names(insol_time_basename, grass.gisenv()['MAPSET'],
                              start_day, end_day, day_step)

    # check for slope/aspect: derive missing ones from the elevation model
    if not aspect_input or not slope_input:
        params = {}
        if not aspect_input:
            aspect_input = create_tmp_map_name('aspect')
            params.update({'aspect': aspect_input})
            REMOVE.append(aspect_input)
        if not slope_input:
            slope_input = create_tmp_map_name('slope')
            params.update({'slope': slope_input})
            REMOVE.append(slope_input)
        grass.info(_("Running r.slope.aspect..."))
        grass.run_command('r.slope.aspect', elevation=elevation_input,
                          quiet=True, **params)

    # Initialize the cumulative maps to zero so sum_maps can accumulate.
    if beam_rad:
        grass.mapcalc('{beam} = 0'.format(beam=beam_rad), quiet=True)
    if diff_rad:
        grass.mapcalc('{diff} = 0'.format(diff=diff_rad), quiet=True)
    if refl_rad:
        grass.mapcalc('{refl} = 0'.format(refl=refl_rad), quiet=True)
    if glob_rad:
        grass.mapcalc('{glob} = 0'.format(glob=glob_rad), quiet=True)
    if insol_time:
        grass.mapcalc('{insol} = 0'.format(insol=insol_time), quiet=True)

    rsun_flags = ''
    if flags['m']:
        rsun_flags += 'm'
    if flags['p']:
        rsun_flags += 'p'

    grass.info(_("Running r.sun in a loop..."))
    count = 0
    # Parallel processing: at most nprocs r.sun workers run at once.
    proc_list = []
    proc_count = 0
    suffixes_all = []
    days = range(start_day, end_day + 1, day_step)
    num_days = len(days)
    core.percent(0, num_days, 1)
    for day in days:
        count += 1
        core.percent(count, num_days, 10)
        suffix = '_' + format_order(day)
        proc_list.append(
            Process(target=run_r_sun,
                    args=(elevation_input, aspect_input, slope_input, latitude,
                          longitude, linke_input, linke_value, albedo_input,
                          albedo_value, horizon_basename, horizon_step,
                          solar_constant, day, step, beam_rad_basename,
                          diff_rad_basename, refl_rad_basename,
                          glob_rad_basename, insol_time_basename, suffix,
                          rsun_flags)))
        proc_list[proc_count].start()
        proc_count += 1
        suffixes_all.append(suffix)

        # Drain the batch when the pool is full or on the final day.
        if proc_count == nprocs or proc_count == num_days or count == num_days:
            proc_count = 0
            exitcodes = 0
            for proc in proc_list:
                proc.join()
                exitcodes += proc.exitcode
            if exitcodes != 0:
                core.fatal(_("Error while r.sun computation"))
            # Empty process list
            proc_list = []

    # Accumulate the per-day maps into the requested cumulative outputs.
    if beam_rad:
        sum_maps(beam_rad, beam_rad_basename, suffixes_all)
    if diff_rad:
        sum_maps(diff_rad, diff_rad_basename, suffixes_all)
    if refl_rad:
        sum_maps(refl_rad, refl_rad_basename, suffixes_all)
    if glob_rad:
        sum_maps(glob_rad, glob_rad_basename, suffixes_all)
    if insol_time:
        sum_maps(insol_time, insol_time_basename, suffixes_all)

    # FIXME: how percent really works?
    # core.percent(1, 1, 1)

    # set color table
    if beam_rad:
        set_color_table([beam_rad])
    if diff_rad:
        set_color_table([diff_rad])
    if refl_rad:
        set_color_table([refl_rad])
    if glob_rad:
        set_color_table([glob_rad])
    if insol_time:
        set_color_table([insol_time])

    # No user-requested daily maps: nothing left to timestamp/register.
    if not any([
            beam_rad_basename_user, diff_rad_basename_user,
            refl_rad_basename_user, glob_rad_basename_user,
            insol_time_basename_user
    ]):
        return 0

    # add timestamps and register to spatio-temporal raster data set
    temporal = flags['t']
    if temporal:
        core.info(_("Registering created maps into temporal dataset..."))
        import grass.temporal as tgis

        def registerToTemporal(basename, suffixes, mapset, start_day, day_step,
                               title, desc):
            """ Register daily output maps in spatio-temporal raster data set
            """
            maps = ','.join(
                [basename + suf + '@' + mapset for suf in suffixes])
            tgis.open_new_stds(basename, type='strds',
                               temporaltype='relative', title=title,
                               descr=desc, semantic='sum', dbif=None,
                               overwrite=grass.overwrite())
            tgis.register_maps_in_space_time_dataset(type='rast',
                                                     name=basename, maps=maps,
                                                     start=start_day, end=None,
                                                     unit='days',
                                                     increment=day_step,
                                                     dbif=None, interval=False)

        # Make sure the temporal database exists
        tgis.init()

        mapset = grass.gisenv()['MAPSET']
        if beam_rad_basename_user:
            registerToTemporal(
                beam_rad_basename, suffixes_all, mapset, start_day, day_step,
                title="Beam irradiation",
                desc="Output beam irradiation raster maps [Wh.m-2.day-1]")
        if diff_rad_basename_user:
            registerToTemporal(
                diff_rad_basename, suffixes_all, mapset, start_day, day_step,
                title="Diffuse irradiation",
                desc="Output diffuse irradiation raster maps [Wh.m-2.day-1]")
        if refl_rad_basename_user:
            registerToTemporal(
                refl_rad_basename, suffixes_all, mapset, start_day, day_step,
                title="Reflected irradiation",
                desc="Output reflected irradiation raster maps [Wh.m-2.day-1]")
        if glob_rad_basename_user:
            registerToTemporal(
                glob_rad_basename, suffixes_all, mapset, start_day, day_step,
                title="Total irradiation",
                desc="Output total irradiation raster maps [Wh.m-2.day-1]")
        if insol_time_basename_user:
            registerToTemporal(insol_time_basename, suffixes_all, mapset,
                               start_day, day_step, title="Total insolation",
                               desc="Output total insolation raster maps [h]")
    # just add timestamps, don't register
    else:
        for i, day in enumerate(days):
            if beam_rad_basename_user:
                set_time_stamp(beam_rad_basename + suffixes_all[i], day=day)
            if diff_rad_basename_user:
                set_time_stamp(diff_rad_basename + suffixes_all[i], day=day)
            if refl_rad_basename_user:
                set_time_stamp(refl_rad_basename + suffixes_all[i], day=day)
            if glob_rad_basename_user:
                set_time_stamp(glob_rad_basename + suffixes_all[i], day=day)
            if insol_time_basename_user:
                set_time_stamp(insol_time_basename + suffixes_all[i], day=day)

    # set color table for daily maps
    if beam_rad_basename_user:
        maps = [beam_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps)
    if diff_rad_basename_user:
        maps = [diff_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps)
    if refl_rad_basename_user:
        maps = [refl_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps)
    if glob_rad_basename_user:
        maps = [glob_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps)
    if insol_time_basename_user:
        maps = [insol_time_basename + suf for suf in suffixes_all]
        set_color_table(maps)
def setUpClass(cls):
    """Initiate the temporal GIS and set the region.

    Creates random vector test maps (a*, b*, c*, d*, single*maps), four
    absolute-time STVDS (A, B, C, D) and registers the maps with different
    start dates/increments so the datasets overlap in various ways.
    """
    tgis.init(True)  # Raise on error instead of exit(1)
    cls.use_temp_region()
    cls.runModule("g.region", n=80.0, s=0.0, e=120.0, w=0.0,
                  t=1.0, b=0.0, res=10.0)
    # Maps sharing a seed have identical point geometry.
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=1, output='a1')
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=1, output='a2')
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=1, output='a3')
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=1, output='a4')
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=2, output='b1')
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=2, output='b2')
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=3, output='c1')
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=4, output='d1')
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=4, output='d2')
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=4, output='d3')
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=5, output='singletmap')
    cls.runModule("v.random", overwrite=True, quiet=True, npoints=20,
                  seed=6, output='singlemap')

    tgis.open_new_stds(name="A", type="stvds", temporaltype="absolute",
                       title="A", descr="A", semantic="field",
                       overwrite=True)
    tgis.open_new_stds(name="B", type="stvds", temporaltype="absolute",
                       title="B", descr="B", semantic="field",
                       overwrite=True)
    # NOTE(review): title="B" for dataset C looks like a copy-paste slip --
    # harmless for the tests, but confirm before relying on titles.
    tgis.open_new_stds(name="C", type="stvds", temporaltype="absolute",
                       title="B", descr="C", semantic="field",
                       overwrite=True)
    tgis.open_new_stds(name="D", type="stvds", temporaltype="absolute",
                       title="D", descr="D", semantic="field",
                       overwrite=True)

    # A: four consecutive daily intervals starting 2001-01-01.
    tgis.register_maps_in_space_time_dataset(type="vector", name="A",
                                             maps="a1,a2,a3,a4",
                                             start="2001-01-01",
                                             increment="1 day",
                                             interval=True)
    # B: two 2-day intervals covering the same span as A.
    tgis.register_maps_in_space_time_dataset(type="vector", name="B",
                                             maps="b1,b2",
                                             start="2001-01-01",
                                             increment="2 day",
                                             interval=True)
    tgis.register_maps_in_space_time_dataset(type="vector", name="C",
                                             maps="c1",
                                             start="2001-01-02",
                                             increment="2 day",
                                             interval=True)
    tgis.register_maps_in_space_time_dataset(type="vector", name="D",
                                             maps="d1,d2,d3",
                                             start="2001-01-03",
                                             increment="1 day",
                                             interval=True)
    # singletmap is timestamped but registered in no dataset (name=None).
    tgis.register_maps_in_space_time_dataset(type="vector", name=None,
                                             maps="singletmap",
                                             start="2001-01-03",
                                             end="2001-01-04")
def main():
    """Fill temporal gaps in a space time raster dataset by interpolating
    between the gap's temporal neighbors with r.series.interp.

    Reads the module options (input STRDS, basename for the new maps, an
    optional WHERE clause and the number of parallel processes), computes
    one or more interpolated maps per gap according to the dataset
    granularity, and registers them in the dataset.
    """
    # Get the options
    input = options["input"]
    base = options["basename"]
    where = options["where"]
    nprocs = options["nprocs"]

    mapset = grass.gisenv()["MAPSET"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds")
    maps = sp.get_registered_maps_as_objects_with_gaps(where, dbif)

    num = len(maps)

    # Configure the r.series.interp module; run_/finish_=False so the
    # module objects can be queued and executed in parallel later.
    gapfill_module = pymod.Module(
        "r.series.interp",
        overwrite=grass.overwrite(),
        quiet=True,
        run_=False,
        finish_=False,
    )

    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    gap_list = []
    overwrite_flags = {}

    # Identify all gaps (entries without an id) and give them new names.
    count = 0
    for _map in maps:
        if _map.get_id() is None:
            count += 1
            _id = "%s_%d@%s" % (base, num + count, mapset)
            _map.set_id(_id)
            gap_list.append(_map)

    if len(gap_list) == 0:
        grass.message(_("No gaps found"))
        return

    # Build the temporal topology
    tb = tgis.SpatioTemporalTopologyBuilder()
    tb.build(maps)

    # Do some checks before computation
    for _map in gap_list:
        if not _map.get_precedes() or not _map.get_follows():
            grass.fatal(
                _("Unable to determine successor "
                  "and predecessor of a gap."))

        if len(_map.get_precedes()) > 1:
            grass.warning(
                _("More than one successor of the gap found. "
                  "Using the first found."))

        if len(_map.get_follows()) > 1:
            grass.warning(
                _("More than one predecessor of the gap found. "
                  "Using the first found."))

    # Interpolate the maps using parallel processing
    result_list = []

    for _map in gap_list:
        predecessor = _map.get_follows()[0]
        successor = _map.get_precedes()[0]

        gran = sp.get_granularity()
        tmpval, start = predecessor.get_temporal_extent_as_tuple()
        end, tmpval = successor.get_temporal_extent_as_tuple()

        # Now resample the gap to the dataset granularity; each granule
        # becomes one interpolated map.
        map_matrix = tgis.AbstractSpaceTimeDataset.resample_maplist_by_granularity(
            (_map, ), start, end, gran)

        map_names = []
        map_positions = []

        # Evenly spaced sampling positions in (0, 1) between the neighbors.
        increment = 1.0 / (len(map_matrix) + 1.0)
        position = increment
        count = 0
        for intp_list in map_matrix:
            new_map = intp_list[0]
            count += 1
            new_id = "%s_%i@%s" % (_map.get_name(), count,
                                   tgis.get_current_mapset())
            new_map.set_id(new_id)

            overwrite_flags[new_id] = False
            if new_map.map_exists() or new_map.is_in_db(dbif):
                # BUGFIX: grass.overwrite is a function -- the original
                # `if not grass.overwrite:` tested the function object and
                # therefore never triggered; also report the clashing
                # new_id, not the stale gap _id.
                if not grass.overwrite():
                    grass.fatal(
                        _("Map with name <%s> already exists. "
                          "Please use another base name.") % new_id)
                else:
                    if new_map.is_in_db(dbif):
                        overwrite_flags[new_id] = True

            map_names.append(new_map.get_name())
            map_positions.append(position)
            position += increment

            result_list.append(new_map)

        mod = copy.deepcopy(gapfill_module)
        mod(input=(predecessor.get_map_id(), successor.get_map_id()),
            datapos=(0, 1), output=map_names, samplingpos=map_positions)
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Insert new interpolated maps in temporal database and dataset
    for _map in result_list:
        # Note: avoid shadowing the builtin id().
        map_id = _map.get_id()
        if overwrite_flags[map_id]:
            # Re-create the map instance so stale DB entries are replaced
            # while preserving the original temporal extent.
            if _map.is_time_absolute():
                start, end = _map.get_absolute_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(map_id)
                _map.set_absolute_time(start, end)
            else:
                start, end, unit = _map.get_relative_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(map_id)
                _map.set_relative_time(start, end, unit)
        _map.load()
        _map.insert(dbif)
        sp.register_map(_map, dbif)

    sp.update_from_registered_maps(dbif)
    sp.update_command_string(dbif=dbif)
    dbif.close()
def main():
    """Drive hourly r.sun runs over a time range for one day, in parallel,
    then timestamp (and optionally register) the per-time-step outputs.

    Reads all inputs from the module's parsed options/flags; temporary map
    names are tracked in the module-level TMP list (presumably cleaned up by
    an atexit handler defined elsewhere -- not visible here).
    """
    options, flags = grass.parser()

    elevation_input = options['elevation']
    aspect_input = options['aspect']
    slope_input = options['slope']
    linke = options['linke']
    linke_value = options['linke_value']
    albedo = options['albedo']
    albedo_value = options['albedo_value']

    beam_rad_basename = options['beam_rad_basename']
    diff_rad_basename = options['diff_rad_basename']
    refl_rad_basename = options['refl_rad_basename']
    glob_rad_basename = options['glob_rad_basename']
    incidout_basename = options['incidout_basename']
    if not any([beam_rad_basename, diff_rad_basename, refl_rad_basename,
                glob_rad_basename, incidout_basename]):
        grass.fatal(_("No output specified."))

    start_time = float(options['start_time'])
    end_time = float(options['end_time'])
    time_step = float(options['time_step'])
    nprocs = int(options['nprocs'])
    day = int(options['day'])
    temporal = flags['t']
    binary = flags['b']
    binaryTmpName = 'binary'
    year = int(options['year'])

    if not is_grass_7() and temporal:
        grass.warning(_("Flag t has effect only in GRASS 7"))

    # check: start < end
    if start_time > end_time:
        grass.fatal(_("Start time is after end time."))
    if time_step >= end_time - start_time:
        grass.fatal(_("Time step is too big."))

    # here we check all the days
    if not grass.overwrite():
        check_time_map_names(beam_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)
        check_time_map_names(diff_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)
        check_time_map_names(refl_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)
        check_time_map_names(glob_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)

    # check for slope/aspect: derive missing ones from the elevation model
    if not aspect_input or not slope_input:
        params = {}
        if not aspect_input:
            aspect_input = create_tmp_map_name('aspect')
            params.update({'aspect': aspect_input})
            TMP.append(aspect_input)
        if not slope_input:
            slope_input = create_tmp_map_name('slope')
            params.update({'slope': slope_input})
            TMP.append(slope_input)
        grass.info(_("Running r.slope.aspect..."))
        grass.run_command('r.slope.aspect', elevation=elevation_input,
                          quiet=True, **params)

    grass.info(_("Running r.sun in a loop..."))
    count = 0
    # Parallel processing: at most nprocs r.sun workers run at once.
    proc_list = []
    proc_count = 0
    suffixes = []
    suffixes_all = []
    times = list(frange(start_time, end_time, time_step))
    num_times = len(times)
    core.percent(0, num_times, 1)
    for time in times:
        count += 1
        core.percent(count, num_times, 10)
        suffix = '_' + format_time(time)
        proc_list.append(Process(target=run_r_sun,
                                 args=(elevation_input, aspect_input,
                                       slope_input, day, time, linke,
                                       linke_value, albedo, albedo_value,
                                       beam_rad_basename,
                                       diff_rad_basename,
                                       refl_rad_basename,
                                       glob_rad_basename,
                                       incidout_basename, suffix,
                                       binary, binaryTmpName)))
        proc_list[proc_count].start()
        proc_count += 1
        suffixes.append(suffix)
        suffixes_all.append(suffix)

        # Drain the batch when the pool is full or on the final step.
        if proc_count == nprocs or proc_count == num_times or count == num_times:
            proc_count = 0
            exitcodes = 0
            for proc in proc_list:
                proc.join()
                exitcodes += proc.exitcode
            if exitcodes != 0:
                core.fatal(_("Error while r.sun computation"))
            # Empty process list
            proc_list = []
            suffixes = []
    # FIXME: how percent really works?
    # core.percent(1, 1, 1)

    # add timestamps either via temporal framework in 7 or r.timestamp in 6.x
    if is_grass_7() and temporal:
        core.info(_("Registering created maps into temporal dataset..."))
        import grass.temporal as tgis

        def registerToTemporal(basename, suffixes, mapset, start_time,
                               time_step, title, desc):
            """Register the suffixed maps in an absolute-time STRDS."""
            maps = ','.join([basename + suf + '@' + mapset
                             for suf in suffixes])
            tgis.open_new_stds(basename, type='strds',
                               temporaltype='absolute', title=title,
                               descr=desc, semantic='mean', dbif=None,
                               overwrite=grass.overwrite())

            tgis.register_maps_in_space_time_dataset(
                type='raster', name=basename, maps=maps, start=start_time,
                end=None, increment=time_step, dbif=None, interval=False)

        # Make sure the temporal database exists
        tgis.init()

        mapset = grass.gisenv()['MAPSET']
        # Convert day-of-year + decimal hour to an absolute datetime.
        absolute_time = datetime.datetime(year, 1, 1) + \
            datetime.timedelta(days=day - 1) + \
            datetime.timedelta(hours=start_time)
        start = absolute_time.strftime("%Y-%m-%d %H:%M:%S")
        step = datetime.timedelta(hours=time_step)
        step = "%d seconds" % step.seconds

        if beam_rad_basename:
            registerToTemporal(beam_rad_basename, suffixes_all, mapset, start,
                               step, title="Beam irradiance",
                               desc="Output beam irradiance raster maps [Wh.m-2]")
        if diff_rad_basename:
            registerToTemporal(diff_rad_basename, suffixes_all, mapset, start,
                               step, title="Diffuse irradiance",
                               desc="Output diffuse irradiance raster maps [Wh.m-2]")
        if refl_rad_basename:
            registerToTemporal(refl_rad_basename, suffixes_all, mapset, start,
                               step, title="Reflected irradiance",
                               desc="Output reflected irradiance raster maps [Wh.m-2]")
        if glob_rad_basename:
            registerToTemporal(glob_rad_basename, suffixes_all, mapset, start,
                               step, title="Total irradiance",
                               desc="Output total irradiance raster maps [Wh.m-2]")
        if incidout_basename:
            registerToTemporal(incidout_basename, suffixes_all, mapset, start,
                               step, title="Incidence angle",
                               desc="Output incidence angle raster maps")

    else:
        absolute_time = datetime.datetime(year, 1, 1) + \
            datetime.timedelta(days=day - 1)
        for i, time in enumerate(times):
            grass_time = format_grass_time(absolute_time +
                                           datetime.timedelta(hours=time))
            if beam_rad_basename:
                set_time_stamp(beam_rad_basename + suffixes_all[i],
                               time=grass_time)
            if diff_rad_basename:
                set_time_stamp(diff_rad_basename + suffixes_all[i],
                               time=grass_time)
            if refl_rad_basename:
                set_time_stamp(refl_rad_basename + suffixes_all[i],
                               time=grass_time)
            if glob_rad_basename:
                set_time_stamp(glob_rad_basename + suffixes_all[i],
                               time=grass_time)
            if incidout_basename:
                set_time_stamp(incidout_basename + suffixes_all[i],
                               time=grass_time)

    # set color tables (binary outputs get their own handling downstream)
    if beam_rad_basename:
        maps = [beam_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if diff_rad_basename:
        maps = [diff_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if refl_rad_basename:
        maps = [refl_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if glob_rad_basename:
        maps = [glob_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if incidout_basename:
        maps = [incidout_basename + suf for suf in suffixes_all]
        set_color_table(maps)
def main():
    """Cyclically analyse an input STRDS: compute per-cycle occurrence maps
    (days since cycle start where values fall in a range or between
    min/max threshold datasets) and, optionally, start/intermediate/end
    indicator maps derived from the occurrence maps.

    All inputs come from the module's parsed options/flags. Results are
    registered in new occurrence/indicator space time raster datasets.
    """
    # Get the options
    input = options["input"]
    start = options["start"]
    stop = options["stop"]
    base = options["basename"]
    cycle = options["cycle"]
    offset = options["offset"]
    minimum = options["minimum"]
    maximum = options["maximum"]
    occurrence = options["occurrence"]
    range = options["range"]
    indicator = options["indicator"]
    staend = options["staend"]
    register_null = flags["n"]
    reverse = flags["r"]

    grass.set_raise_on_error(True)

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = tgis.get_current_mapset()

    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    input_strds = tgis.SpaceTimeRasterDataset(id)
    if input_strds.is_in_db() == False:
        dbif.close()
        grass.fatal(
            _("Space time %s dataset <%s> not found") %
            (input_strds.get_output_map_instance(None).get_type(), id))
    input_strds.select(dbif)

    dummy = input_strds.get_new_map_instance(None)

    # The occurrence space time raster dataset
    if occurrence:
        if not minimum or not maximum:
            if not range:
                dbif.close()
                grass.fatal(
                    _("You need to set the range to compute the occurrence"
                      " space time raster dataset"))

        if occurrence.find("@") >= 0:
            occurrence_id = occurrence
        else:
            occurrence_id = occurrence + "@" + mapset

        occurrence_strds = tgis.SpaceTimeRasterDataset(occurrence_id)
        if occurrence_strds.is_in_db(dbif):
            if not grass.overwrite():
                dbif.close()
                grass.fatal(
                    _("Space time raster dataset <%s> is already in the "
                      "database, use overwrite flag to overwrite") %
                    occurrence_id)

    # The indicator space time raster dataset
    if indicator:
        if not occurrence:
            dbif.close()
            grass.fatal(
                _("You need to set the occurrence to compute the indicator"
                  " space time raster dataset"))
        if not staend:
            dbif.close()
            grass.fatal(
                _("You need to set the staend options to compute the indicator"
                  " space time raster dataset"))
        if indicator.find("@") >= 0:
            # BUGFIX: the original read `indicator = indicator` (a no-op),
            # leaving indicator_id unbound for fully qualified names.
            indicator_id = indicator
        else:
            indicator_id = indicator + "@" + mapset

        indicator_strds = tgis.SpaceTimeRasterDataset(indicator_id)
        if indicator_strds.is_in_db(dbif):
            if not grass.overwrite():
                dbif.close()
                grass.fatal(
                    _("Space time raster dataset <%s> is already in the "
                      "database, use overwrite flag to overwrite") %
                    indicator_id)
        staend = staend.split(",")
        indicator_start = int(staend[0])
        indicator_mid = int(staend[1])
        indicator_end = int(staend[2])

    # The minimum threshold space time raster dataset
    minimum_strds = None
    if minimum:
        if minimum.find("@") >= 0:
            minimum_id = minimum
        else:
            minimum_id = minimum + "@" + mapset

        minimum_strds = tgis.SpaceTimeRasterDataset(minimum_id)
        if minimum_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found") %
                (minimum_strds.get_id()))

        if minimum_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and minimum strds must be equal"
                  ))
        minimum_strds.select(dbif)

    # The maximum threshold space time raster dataset
    maximum_strds = None
    if maximum:
        if maximum.find("@") >= 0:
            maximum_id = maximum
        else:
            maximum_id = maximum + "@" + mapset

        maximum_strds = tgis.SpaceTimeRasterDataset(maximum_id)
        if maximum_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found") %
                (maximum_strds.get_id()))

        if maximum_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and maximum strds must be equal"
                  ))
        maximum_strds.select(dbif)

    input_strds_start, input_strds_end = input_strds.get_temporal_extent_as_tuple()

    # Parse start/stop according to the dataset's temporal type; an unset
    # stop defaults to the end of the input dataset.
    if input_strds.is_time_absolute():
        start = tgis.string_to_datetime(start)
        if stop:
            stop = tgis.string_to_datetime(stop)
        else:
            stop = input_strds_end
    else:
        start = int(start)
        if stop:
            stop = int(stop)
        else:
            stop = input_strds_end

    if input_strds.is_time_absolute():
        end = tgis.increment_datetime_by_string(start, cycle)
    else:
        end = start + cycle

    def _advance_cycle(end):
        """Return (start, end) of the next cycle, honoring the offset."""
        if input_strds.is_time_absolute():
            new_start = end
            if offset:
                new_start = tgis.increment_datetime_by_string(end, offset)
            new_end = tgis.increment_datetime_by_string(new_start, cycle)
        else:
            new_start = end
            if offset:
                new_start = end + offset
            new_end = new_start + cycle
        return new_start, new_end

    count = 1
    indi_count = 1
    occurrence_maps = {}
    indicator_maps = {}

    while input_strds_end > start and stop > start:

        # Make sure that the cyclic computation will stop at the correct time
        if stop and end > stop:
            end = stop

        where = "start_time >= \'%s\' AND start_time < \'%s\'" % (str(start),
                                                                  str(end))
        input_maps = input_strds.get_registered_maps_as_objects(where=where,
                                                                dbif=dbif)

        if len(input_maps) == 0:
            # BUGFIX: the original `continue`d without advancing the cycle,
            # looping forever on an empty cycle.
            start, end = _advance_cycle(end)
            continue

        input_topo = tgis.SpatioTemporalTopologyBuilder()
        input_topo.build(input_maps, input_maps)

        grass.message(_("Processing cycle %s - %s" % (str(start), str(end))))

        count = compute_occurrence(occurrence_maps, input_strds, input_maps,
                                   start, base, count, mapset, where, reverse,
                                   range, minimum_strds, maximum_strds, dbif)

        # Indicator computation is based on the occurrence so we need to start it after
        # the occurrence cycle
        if indicator:
            num_maps = len(input_maps)
            for i in xrange(num_maps):
                if reverse:
                    map = input_maps[num_maps - i - 1]
                else:
                    map = input_maps[i]

                indicator_map_name = "%s_indicator_%i" % (base, indi_count)
                indicator_map_id = dummy.build_id(indicator_map_name, mapset)
                indicator_map = input_strds.get_new_map_instance(
                    indicator_map_id)

                # Check if new map is in the temporal database
                if indicator_map.is_in_db(dbif):
                    if grass.overwrite():
                        # Remove the existing temporal database entry
                        indicator_map.delete(dbif)
                        indicator_map = input_strds.get_new_map_instance(
                            indicator_map_id)
                    else:
                        grass.fatal(
                            _("Map <%s> is already registered in the temporal"
                              " database, use overwrite flag to overwrite.") %
                            (indicator_map.get_map_id()))

                curr_map = occurrence_maps[map.get_id()].get_name()

                # Build subexpr1/subexpr3 for the final mapcalc expression:
                # start marker on the first map, intermediate/end markers in
                # between, mid marker on the last map of the cycle.
                # Reverse time
                if reverse:
                    if i == 0:
                        prev_map = curr_map
                        subexpr1 = "null()"
                        subexpr3 = "%i" % (indicator_start)
                    elif i > 0 and i < num_maps - 1:
                        prev_map = occurrence_maps[
                            map.next().get_id()].get_name()
                        next_map = occurrence_maps[
                            map.prev().get_id()].get_name()
                        # In case the previous map is null() set null() or the start indicator
                        subexpr1 = "if(isnull(%s), null(), %i)" % (
                            curr_map, indicator_start)
                        # In case the previous map was not null() if the current map is null() set null()
                        # if the current map is not null() and the next map is not null() set
                        # intermediate indicator, if the next map is null set the end indicator
                        subexpr2 = "if(isnull(%s), %i, %i)" % (
                            next_map, indicator_end, indicator_mid)
                        subexpr3 = "if(isnull(%s), null(), %s)" % (curr_map,
                                                                   subexpr2)
                    else:
                        prev_map = occurrence_maps[
                            map.next().get_id()].get_name()
                        subexpr1 = "if(isnull(%s), null(), %i)" % (
                            curr_map, indicator_start)
                        subexpr3 = "if(isnull(%s), null(), %i)" % (
                            curr_map, indicator_mid)
                else:
                    if i == 0:
                        prev_map = curr_map
                        subexpr1 = "null()"
                        subexpr3 = "%i" % (indicator_start)
                    elif i > 0 and i < num_maps - 1:
                        prev_map = occurrence_maps[
                            map.prev().get_id()].get_name()
                        next_map = occurrence_maps[
                            map.next().get_id()].get_name()
                        # In case the previous map is null() set null() or the start indicator
                        subexpr1 = "if(isnull(%s), null(), %i)" % (
                            curr_map, indicator_start)
                        # In case the previous map was not null() if the current map is null() set null()
                        # if the current map is not null() and the next map is not null() set
                        # intermediate indicator, if the next map is null set the end indicator
                        subexpr2 = "if(isnull(%s), %i, %i)" % (
                            next_map, indicator_end, indicator_mid)
                        subexpr3 = "if(isnull(%s), null(), %s)" % (curr_map,
                                                                   subexpr2)
                    else:
                        prev_map = occurrence_maps[
                            map.prev().get_id()].get_name()
                        subexpr1 = "if(isnull(%s), null(), %i)" % (
                            curr_map, indicator_start)
                        subexpr3 = "if(isnull(%s), null(), %i)" % (
                            curr_map, indicator_mid)

                # Single final expression (the per-branch assignments in the
                # original were identical and therefore redundant).
                expression = "%s = if(isnull(%s), %s, %s)" % (
                    indicator_map_name, prev_map, subexpr1, subexpr3)
                grass.mapcalc(expression, overwrite=True)

                map_start, map_end = map.get_temporal_extent_as_tuple()

                if map.is_time_absolute():
                    indicator_map.set_absolute_time(map_start, map_end)
                else:
                    indicator_map.set_relative_time(
                        map_start, map_end, map.get_relative_time_unit())

                indicator_maps[map.get_id()] = indicator_map
                indi_count += 1

        # Increment the cycle
        start, end = _advance_cycle(end)

    empty_maps = []

    # NOTE(review): occurrence_strds is only bound when the occurrence
    # option is set -- presumably it is a required option; verify upstream.
    create_strds_register_maps(input_strds, occurrence_strds, occurrence_maps,
                               register_null, empty_maps, dbif)
    if indicator:
        create_strds_register_maps(input_strds, indicator_strds,
                                   indicator_maps, register_null, empty_maps,
                                   dbif)

    dbif.close()

    # Remove empty maps
    if len(empty_maps) > 0:
        for map in empty_maps:
            grass.run_command("g.remove", flags='f', type="rast",
                              pattern=map.get_name(), quiet=True)
def main():
    """Sample a STRDS at the positions of an STVDS and upload the raster
    values into attribute columns of the vector maps via v.what.rast.

    For each vector map interval, the temporally matching raster maps are
    collected; multiple matches can optionally be aggregated into one
    temporary map first. All inputs come from the module's parsed options.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    strds = options["strds"]
    where = options["where"]
    column = options["column"]
    method = options["method"]
    tempwhere = options["t_where"]
    sampling = options["sampling"]

    # Treat blank-ish WHERE clauses as unset.
    if where == "" or where == " " or where == "\n":
        where = None

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "stvds", dbif)
    strds_sp = tgis.open_old_stds(strds, "strds", dbif)

    if strds_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        grass.fatal(
            _("Input and aggregation dataset must "
              "have the same temporal type"))

    # Check if intervals are present in the sample dataset
    if sp.get_temporal_type() == "absolute":
        map_time = sp.absolute_time.get_map_time()
    else:
        map_time = sp.relative_time.get_map_time()

    if map_time != "interval":
        dbif.close()
        grass.fatal(
            _("All registered maps of the space time vector "
              "dataset must have time intervals"))

    rows = sp.get_registered_maps("name,layer,mapset,start_time,end_time",
                                  tempwhere, "start_time", dbif)

    if not rows:
        dbif.close()
        grass.fatal(_("Space time vector dataset <%s> is empty") % sp.get_id())

    # Sample the raster dataset with the vector dataset and run v.what.rast
    for row in rows:
        start = row["start_time"]
        end = row["end_time"]
        vectmap = row["name"] + "@" + row["mapset"]
        layer = row["layer"]

        raster_maps = tgis.collect_map_names(strds_sp, dbif, start, end,
                                             sampling)

        aggreagated_map_name = None

        if raster_maps:
            # Aggregation
            if method != "disabled" and len(raster_maps) > 1:

                # Generate the temporary map name
                aggreagated_map_name = "aggreagated_map_name_" + str(
                    os.getpid())
                new_map = tgis.aggregate_raster_maps(
                    raster_maps,
                    aggreagated_map_name,
                    start,
                    end,
                    0,
                    method,
                    False,
                    dbif,
                )
                # r.series-style output gets a "_0" suffix appended.
                aggreagated_map_name = aggreagated_map_name + "_0"
                if new_map is None:
                    continue
                # We overwrite the raster_maps list
                raster_maps = (new_map.get_id(), )

            for rastermap in raster_maps:

                if column:
                    col_name = column
                else:
                    # Create a new column with the SQL compliant
                    # name of the sampled raster map
                    col_name = rastermap.split("@")[0].replace(".", "_")

                coltype = "DOUBLE PRECISION"
                # Get raster type
                rasterinfo = raster.raster_info(rastermap)
                if rasterinfo["datatype"] == "CELL":
                    coltype = "INT"

                try:
                    if layer:
                        grass.run_command(
                            "v.db.addcolumn",
                            map=vectmap,
                            layer=layer,
                            column="%s %s" % (col_name, coltype),
                            overwrite=grass.overwrite(),
                        )
                    else:
                        grass.run_command(
                            "v.db.addcolumn",
                            map=vectmap,
                            column="%s %s" % (col_name, coltype),
                            overwrite=grass.overwrite(),
                        )
                except CalledModuleError:
                    dbif.close()
                    grass.fatal(
                        _("Unable to add column %s to vector map <%s>") %
                        (col_name, vectmap))

                # Call v.what.rast
                try:
                    if layer:
                        grass.run_command(
                            "v.what.rast",
                            map=vectmap,
                            layer=layer,
                            raster=rastermap,
                            column=col_name,
                            where=where,
                        )
                    else:
                        grass.run_command(
                            "v.what.rast",
                            map=vectmap,
                            raster=rastermap,
                            column=col_name,
                            where=where,
                        )
                except CalledModuleError:
                    dbif.close()
                    grass.fatal(
                        _("Unable to run v.what.rast for vector map "
                          "<%s> and raster map <%s>") % (vectmap, rastermap))

                # Remove the temporary aggregated map, if any.
                if aggreagated_map_name:
                    try:
                        grass.run_command(
                            "g.remove",
                            flags="f",
                            type="raster",
                            name=aggreagated_map_name,
                        )
                    except CalledModuleError:
                        dbif.close()
                        grass.fatal(
                            _("Unable to remove raster map <%s>") %
                            (aggreagated_map_name))

                # Use the first map in case a column names was provided
                if column:
                    break

    dbif.close()