def main(options, flags):
    """Rasterize the maps of a space time vector dataset into a new strds.

    Opens the input stvds, rasterizes its registered vector maps via the
    module-level ``rasterize()`` helper and registers the resulting raster
    maps in a newly created strds that inherits the input's metadata.
    """
    tgis.init()
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    source_stvds = tgis.open_old_stds(options['input'], "stvds", dbif)
    stamped_maps = source_stvds.get_registered_maps_as_objects(dbif=dbif)
    vector_maps = get_maps(options['input'])
    raster_maps = rasterize(options, vector_maps, stamped_maps, dbif,
                            gscript.overwrite())

    temp_type, semantic_type, title, description = \
        source_stvds.get_initial_values()
    target_strds = tgis.open_new_stds(options['output'], 'strds', temp_type,
                                      title, description, semantic_type, dbif,
                                      overwrite=gscript.overwrite())

    # Load, insert and register every rasterized map in the new dataset
    for raster_map in raster_maps:
        raster_map.load()
        raster_map.insert(dbif)
        target_strds.register_map(raster_map, dbif)

    target_strds.update_from_registered_maps(dbif)
    dbif.close()
def main():
    """Temporally shift a space time dataset by the given granularity.

    Reads ``input``, ``type`` and ``granularity`` from the module-level
    ``options`` dict and calls the dataset's ``shift()`` method.  Exits via
    ``grass.fatal`` when the shift fails.
    """
    # lazy imports
    import grass.temporal as tgis

    name = options["input"]
    stds_type = options["type"]
    gran = options["granularity"]

    # Make sure the temporal database exists
    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds = tgis.open_old_stds(name, stds_type, dbif)
    check = stds.shift(gran=gran, dbif=dbif)

    if check is False:
        dbif.close()
        # BUG FIX: previously interpolated the builtin ``id`` function
        # instead of the dataset name.
        grass.fatal(
            _("Unable to temporally shift the space time %s dataset <%s>")
            % (stds.get_new_map_instance(None).get_type(), name))

    stds.update_command_string(dbif=dbif)
    dbif.close()
def main():
    """Aggregate the registered maps of the input dataset per time stamp.

    One output raster is produced per distinct absolute time found among the
    registered maps; the results are registered in a new strds inheriting
    the input's metadata.
    """
    import grass.temporal as tgis
    tgis.init()
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    inp = tgis.open_old_stds(options['input'], 'raster')
    temp_type, sem_type, title, descr = inp.get_initial_values()
    out = tgis.open_new_stds(options['output'], 'strds', temp_type, title,
                             descr, sem_type, dbif=dbif,
                             overwrite=gcore.overwrite())

    # Collect the distinct time stamps, preserving a sorted order
    dates = []
    for reg_map in inp.get_registered_maps_as_objects():
        stamp = reg_map.get_absolute_time()
        if stamp not in dates:
            dates.append(stamp)
    dates.sort()

    out_maps = []
    for idx, date_value in enumerate(dates, start=1):
        outraster = "{ba}_{su}".format(ba=options['basename'], su=idx)
        out_maps.append(outraster)
        calculate(inp, date_value, out, outraster, options['method'])
    queue.wait()

    times = inp.get_absolute_time()
    tgis.register_maps_in_space_time_dataset(
        'raster', out.get_name(), ','.join(out_maps),
        start=times[0].strftime(date_format),
        end=times[1].strftime(date_format), dbif=dbif)
def main():
    """Rename a space time dataset within the current mapset.

    Validates that the new identifier lives in the current mapset, that the
    old dataset exists and that the new name is either free or overwrite is
    enabled, then performs the rename.
    """
    # Get the options
    input = options["input"]
    output = options["output"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    # Get the current mapset to create the id of the space time dataset
    mapset = grass.gisenv()["MAPSET"]

    if input.find("@") >= 0:
        old_id = input
    else:
        old_id = input + "@" + mapset
    if output.find("@") >= 0:
        new_id = output
    else:
        new_id = output + "@" + mapset

    # Do not overwrite yourself
    if new_id == old_id:
        return

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds = tgis.dataset_factory(type, old_id)

    # Renaming across mapsets is not possible
    if new_id.split("@")[1] != mapset:
        dbif.close()
        grass.fatal(_("Space time %s dataset <%s> can not be renamed. "
                      "Mapset of the new identifier differs from the current "
                      "mapset.") % (stds.get_new_map_instance(None).get_type(),
                                    old_id))

    if not stds.is_in_db(dbif=dbif):
        dbif.close()
        grass.fatal(_("Space time %s dataset <%s> not found") % (
            stds.get_new_map_instance(None).get_type(), old_id))

    # Check if the new id is in the database
    new_stds = tgis.dataset_factory(type, new_id)

    if new_stds.is_in_db(dbif=dbif) and not grass.overwrite():
        dbif.close()
        grass.fatal(_("Unable to rename Space time %s dataset <%s>. Name <%s> "
                      "is in use, please use the overwrite flag.") % (
            stds.get_new_map_instance(None).get_type(), old_id, new_id))

    # Remove an already existing space time dataset
    if new_stds.is_in_db(dbif=dbif):
        new_stds.delete(dbif=dbif)

    stds.select(dbif=dbif)
    stds.rename(ident=new_id, dbif=dbif)
    stds.update_command_string(dbif=dbif)
    # BUG FIX: the connection was previously leaked on the success path
    dbif.close()
def main():
    """Create a copy of a strds with renamed band references.

    Rewrites the band reference column of a ``t.rast.list`` dump according
    to the ``source``/``target`` options, creates a new strds with the input
    dataset's metadata and registers the maps with the new band references.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    _input = options["input"]
    output = options["output"]
    source = options["source"]
    target = options["target"]

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    # specs of input strds
    # BUG FIX: previously passed the builtin ``input`` instead of the
    # ``_input`` option value.
    sp = tgis.open_old_stds(_input, "strds", dbif)
    ttype, stype, title, descr = sp.get_initial_values()
    dbif.close()

    # t.rast.list with columns name, start date, band reference
    rlist = grass.read_command("t.rast.list", input=_input,
                               columns="name,start_time,band_reference",
                               flags="u")
    rlistfile = grass.tempfile(create=False)

    if source:
        source = source.split(',')
    target = target.split(',')

    # modify band names
    with open(rlistfile, "w") as fd:
        for rmap in rlist.splitlines():
            name, start_time, band_reference = rmap.split('|')
            if source:
                # rename only the bands listed in ``source``
                if band_reference in source:
                    idx = source.index(band_reference)
                    band_reference = target[idx]
            else:
                # no source given: every band gets the single target name
                band_reference = target[0]
            fd.write("%s|%s|%s\n" % (name, start_time, band_reference))

    # t.create, use specs of input strds
    grass.run_command('t.create', type='strds', output=output,
                      temporaltype=ttype, semantictype=stype,
                      title=title, description=descr)

    # t.register to create new strds
    grass.run_command('t.register', input=output, file=rlistfile)
def main():
    """Upgrade the temporal database to the current version and return 0."""
    # Skip the version check: the whole point is to upgrade an old database
    tgis.init(skip_db_version_check=True)
    connection = tgis.SQLDatabaseInterfaceConnection()
    connection.connect()
    tgis.upgrade_temporal_database(connection)
    return 0
def main():
    """Upgrade the temporal database to the current version."""
    # lazy imports
    import grass.temporal as tgis

    # Skip the version check: the whole point is to upgrade an old database
    connection_init = tgis.init
    connection_init(skip_db_version_check=True)
    db_connection = tgis.SQLDatabaseInterfaceConnection()
    db_connection.connect()
    tgis.upgrade_temporal_database(db_connection)
def __init__(self, parent, title=_("Timeline Tool")):
    """Initialize the timeline frame and its temporal-database connection.

    :param parent: parent wx window
    :param title: frame title; the default is evaluated once at import time
    """
    wx.Frame.__init__(self, parent, id=wx.ID_ANY, title=title)
    tgis.init(True)
    self.datasets = []   # selected space time datasets (empty until set)
    self.timeData = {}   # temporal information gathered per dataset
    self._layout()
    self.temporalType = None  # temporal type of the shown datasets, set later
    self.unit = None          # relative-time unit, set later
    # We create a database interface here to speedup the GUI
    self.dbif = tgis.SQLDatabaseInterfaceConnection()
    self.dbif.connect()
    self.Bind(wx.EVT_CLOSE, self.OnClose)
def __init__(self, parent):
    """Initialize the temporal plot frame and connect to the temporal DB.

    :param parent: parent wx window
    """
    wx.Frame.__init__(self, parent, id=wx.ID_ANY,
                      title=_("GRASS GIS Temporal Plot Tool"))
    tgis.init(True)
    self.datasets = []   # selected space time datasets (empty until set)
    self.output = None   # optional output target, set later
    self.timeData = {}   # temporal information gathered per dataset
    self._layout()
    self.temporalType = None  # temporal type of the shown datasets, set later
    self.unit = None          # relative-time unit, set later
    # We create a database interface here to speedup the GUI
    self.dbif = tgis.SQLDatabaseInterfaceConnection()
    self.dbif.connect()
def main():
    """Snap the time stamps of all maps registered in a space time dataset."""
    dataset_name = options["input"]
    dataset_type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    dataset = tgis.open_old_stds(dataset_name, dataset_type, dbif)
    dataset.snap(dbif=dbif)
    dataset.update_command_string(dbif=dbif)
    dbif.close()
def main():
    """Run r.null on every registered map of a strds, in parallel.

    Either the 'null' or the 'setnull' option must be given; r.null is run
    with ``nprocs`` parallel processes over all maps matching ``where``.
    """
    strds = options["input"]
    where = options["where"]
    nprocs = int(options["nprocs"])

    # Template r.null call; a deep copy is customized per map below
    null_mod = pymod.Module("r.null")
    null_mod.flags.quiet = True
    if options["null"]:
        null_mod.inputs.null = options["null"]
    elif options["setnull"]:
        null_mod.inputs.setnull = options["setnull"]
    else:
        gscript.fatal(_("Please set 'null' or 'setnull' option"))

    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    dataset = tgis.open_old_stds(strds, "strds", dbif)
    maps = dataset.get_registered_maps_as_objects(where, "start_time", None)
    if maps is None:
        gscript.fatal(
            _("Space time raster dataset {st} seems to be "
              "empty".format(st=strds)))
        return 1

    # module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))
    num_maps = len(maps)

    for count, raster_map in enumerate(maps, start=1):
        module = copy.deepcopy(null_mod)
        module.inputs.map = raster_map.get_id()
        process_queue.put(module)
        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()
def __init__(self, parent, giface):
    """Initialize the temporal plot frame and connect to the temporal DB.

    :param parent: parent wx window
    :param giface: GRASS interface object, kept for later use
    """
    wx.Frame.__init__(self, parent, id=wx.ID_ANY,
                      title=_("GRASS GIS Temporal Plot Tool"))
    tgis.init(True)
    self._giface = giface
    self.datasetsV = None  # selected vector datasets, set later
    self.datasetsR = None  # selected raster datasets, set later
    # self.vectorDraw=False
    # self.rasterDraw=False
    self.init()
    self._layout()
    # We create a database interface here to speedup the GUI
    self.dbif = tgis.SQLDatabaseInterfaceConnection()
    self.dbif.connect()
    self.Bind(wx.EVT_CLOSE, self.onClose)
def main(options, flags):
    """Export all maps of a strds to a text file via r.out.xyz.

    :param options: parsed module options (strds, output, where, separator)
    :param flags: parsed module flags ('i' includes nodata cells)
    """
    strds = options["strds"]
    out_name = options["output"]
    where = options["where"]
    sep = options["separator"]

    donodata = ""
    if flags["i"]:
        donodata = "i"

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where, "start_time", None)
    if maps is None:
        dbif.close()
        gscript.fatal(
            _("Space time raster dataset {st} seems to be "
              "empty".format(st=strds)))
        return 1

    mapnames = [mapp.get_name() for mapp in maps]
    # The database is no longer needed; avoid leaking the connection
    dbif.close()

    try:
        gscript.run_command(
            "r.out.xyz",
            input=",".join(mapnames),
            output=out_name,
            separator=sep,
            flags=donodata,
            overwrite=gscript.overwrite(),
        )
        gscript.message(
            _("Space time raster dataset {st} exported to "
              "{pa}".format(st=strds, pa=out_name)))
    # BUG FIX: a bare ``except:`` also swallowed SystemExit/KeyboardInterrupt
    except Exception:
        gscript.fatal(
            _("Unable to export space time raster dataset "
              "{st}".format(st=strds)))
        return 1
def main():
    """Aggregate a strds with a fixed granularity.

    Builds a list of granularity-conform dummy intervals spanning the input
    dataset's temporal extent and hands it, together with the registered
    maps, to ``tgis.aggregate_by_topology``.  The resulting maps are
    registered in a newly created output strds.
    """
    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    gran = options["granularity"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    # NOTE(review): here the suffix comes from a boolean flag, while sibling
    # blocks in this file read options["suffix"] — confirm this is intended.
    time_suffix = flags["s"]

    topo_list = sampling.split(",")

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds", dbif)

    map_list = sp.get_registered_maps_as_objects(where=where,
                                                 order="start_time",
                                                 dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    start_time = map_list[0].temporal_extent.get_start_time()

    # Align the first granule with the requested granularity
    if sp.is_time_absolute():
        start_time = tgis.adjust_datetime_to_granularity(start_time, gran)

    # We use the end time first
    end_time = map_list[-1].temporal_extent.get_end_time()
    has_end_time = True

    # In case no end time is available, then we use the start time of the
    # last map layer
    if end_time is None:
        end_time = map_list[-1].temporal_extent.get_start_time()
        has_end_time = False

    granularity_list = []

    # Build the granularity list
    while True:
        # With a real end time the last granule must end at it; otherwise
        # one extra granule covering the last start time is allowed
        if has_end_time is True:
            if start_time >= end_time:
                break
        else:
            if start_time > end_time:
                break

        granule = tgis.RasterDataset(None)
        start = start_time
        if sp.is_time_absolute():
            end = tgis.increment_datetime_by_string(start_time, gran)
            granule.set_absolute_time(start, end)
        else:
            end = start_time + int(gran)
            granule.set_relative_time(start, end,
                                      sp.get_relative_time_unit())
        start_time = end

        granularity_list.append(granule)

    output_list = tgis.aggregate_by_topology(
        granularity_list=granularity_list, granularity=gran,
        map_list=map_list, topo_list=topo_list, basename=base,
        time_suffix=time_suffix, offset=offset, method=method,
        nprocs=nprocs, spatial=None, overwrite=gcore.overwrite())

    if output_list:
        temporal_type, semantic_type, title, description = \
            sp.get_initial_values()
        output_strds = tgis.open_new_stds(output, "strds", temporal_type,
                                          title, description, semantic_type,
                                          dbif, gcore.overwrite())
        tgis.register_map_object_list("rast", output_list, output_strds,
                                      register_null,
                                      sp.get_relative_time_unit(), dbif)

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
def main():
    """Aggregate a strds using the intervals of a sampler dataset.

    The sampler dataset's registered map intervals define the aggregation
    granules; the input and sampler datasets must share the same temporal
    type and the sampler maps must have time intervals.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    sampler = options["sample"]
    where = options["where"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]
    type = options["type"]

    topo_list = sampling.split(",")

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds", dbif)
    sampler_sp = tgis.open_old_stds(sampler, type, dbif)

    if sampler_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        gcore.fatal(
            _("Input and aggregation dataset must have "
              "the same temporal type"))

    # Check if intervals are present
    if sampler_sp.temporal_extent.get_map_time() != "interval":
        dbif.close()
        gcore.fatal(
            _("All registered maps of the aggregation dataset "
              "must have time intervals"))

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    map_list = sp.get_registered_maps_as_objects(where=where,
                                                 order="start_time",
                                                 dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)

    # The sampler maps serve directly as the granule list
    granularity_list = sampler_sp.get_registered_maps_as_objects(
        where=where, order="start_time", dbif=dbif)

    if not granularity_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % sampler)

    gran = sampler_sp.get_granularity()

    output_list = tgis.aggregate_by_topology(
        granularity_list=granularity_list,
        granularity=gran,
        map_list=map_list,
        topo_list=topo_list,
        basename=base,
        time_suffix=time_suffix,
        offset=offset,
        method=method,
        nprocs=nprocs,
        spatial=None,
        overwrite=gcore.overwrite(),
    )

    if output_list:
        temporal_type, semantic_type, title, description = \
            sp.get_initial_values()
        output_strds = tgis.open_new_stds(
            output,
            "strds",
            temporal_type,
            title,
            description,
            semantic_type,
            dbif,
            gcore.overwrite(),
        )
        tgis.register_map_object_list(
            "rast",
            output_list,
            output_strds,
            register_null,
            sp.get_relative_time_unit(),
            dbif,
        )

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
def main():
    """Run r.neighbors over every registered map of a strds, in parallel.

    For each selected map an r.neighbors call is queued (optionally paired
    with a g.region call restricted to the map's extent); the resulting maps
    are registered in a new output strds inheriting the input's metadata.
    """
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    use_raster_region = flags["r"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif,
                                 overwrite=overwrite)

    # Configure the r.neighbor module (template, deep-copied per map)
    neighbor_module = pymod.Module("r.neighbors",
                                   input="dummy",
                                   output="dummy",
                                   run_=False,
                                   finish_=False,
                                   size=int(size),
                                   method=method,
                                   overwrite=overwrite,
                                   quiet=True)

    gregion_module = pymod.Module(
        "g.region",
        raster="dummy",
        run_=False,
        finish_=False,
    )

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors all selected maps
    for map in maps:
        count += 1

        # Derive the output map name from the configured suffix scheme
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(
            map_name, None, type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())

        if use_raster_region is True:
            # Pair the computation with a region set to the input map
            reg = copy.deepcopy(gregion_module)
            reg(raster=map.get_id())
            print(reg.get_bash())
            print(mod.get_bash())
            mm = pymod.MultiModule([reg, mod], sync=False,
                                   set_temp_region=True)
            process_queue.put(mm)
        else:
            print(mod.get_bash())
            process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()
    proc_list = process_queue.get_finished_modules()

    # Check return status of all finished modules
    error = 0
    for proc in proc_list:
        if proc.popen.returncode != 0:
            # BUG FIX: the format string used an invalid "%\n" conversion,
            # which raised ValueError instead of reporting the failure
            grass.error(
                _("Error running module: %s\n stderr: %s")
                % (proc.get_bash(), proc.outputs.stderr))
            error += 1

    if error > 0:
        grass.fatal(_("Error running modules."))

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)

    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
           map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove", flags='f', type='raster',
                          name=names, quiet=True)

    dbif.close()
def main():
    """Cyclically accumulate a strds with r.series.accumulate.

    Iterates over cycles of length ``cycle`` starting at ``start``, chops
    each cycle into granules of ``granularity``, selects the input maps per
    granule by temporal topology, and chains r.series.accumulate calls so
    each granule accumulates onto the previous result.  Optional lower/upper
    threshold datasets are matched to granules by temporal relations.
    """
    # lazy imports
    import grass.temporal as tgis
    from grass.pygrass.modules import Module

    # Get the options
    input = options["input"]
    output = options["output"]
    start = options["start"]
    stop = options["stop"]
    base = options["basename"]
    cycle = options["cycle"]
    lower = options["lower"]
    upper = options["upper"]
    offset = options["offset"]
    limits = options["limits"]
    shift = options["shift"]
    scale = options["scale"]
    method = options["method"]
    granularity = options["granularity"]
    register_null = flags["n"]
    reverse = flags["r"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = tgis.get_current_mapset()

    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    input_strds = tgis.SpaceTimeRasterDataset(id)

    if input_strds.is_in_db() == False:
        dbif.close()
        grass.fatal(_("Space time raster dataset <%s> not found") % (id))

    input_strds.select(dbif)

    if output.find("@") >= 0:
        out_id = output
    else:
        out_id = output + "@" + mapset

    # The output space time raster dataset
    output_strds = tgis.SpaceTimeRasterDataset(out_id)
    if output_strds.is_in_db(dbif):
        if not grass.overwrite():
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is already in the "
                  "database, use overwrite flag to overwrite") % out_id)

    if tgis.check_granularity_string(
            granularity, input_strds.get_temporal_type()) == False:
        dbif.close()
        grass.fatal(_("Invalid granularity"))

    if tgis.check_granularity_string(
            cycle, input_strds.get_temporal_type()) == False:
        dbif.close()
        grass.fatal(_("Invalid cycle"))

    if offset:
        if tgis.check_granularity_string(
                offset, input_strds.get_temporal_type()) == False:
            dbif.close()
            grass.fatal(_("Invalid offset"))

    # The lower threshold space time raster dataset
    if lower:
        # NOTE(review): ``range`` here is the Python builtin (always truthy),
        # so this guard can never fire; it presumably was meant to test a
        # range/limits option — confirm against the module's option table.
        if not range:
            dbif.close()
            grass.fatal(
                _("You need to set the range to compute the occurrence"
                  " space time raster dataset"))

        if lower.find("@") >= 0:
            lower_id = lower
        else:
            lower_id = lower + "@" + mapset

        lower_strds = tgis.SpaceTimeRasterDataset(lower_id)
        if lower_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found")
                % (lower_strds.get_id()))

        if lower_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and lower strds must be equal")
            )

        lower_strds.select(dbif)

    # The upper threshold space time raster dataset
    if upper:
        if not lower:
            dbif.close()
            grass.fatal(
                _("The upper option works only in conjunction with the lower option"
                  ))

        if upper.find("@") >= 0:
            # NOTE(review): this branch is a no-op and leaves ``upper_id``
            # unbound (NameError below) when the name contains "@" —
            # presumably should be ``upper_id = upper``.
            upper = upper
        else:
            upper_id = upper + "@" + mapset
        upper_strds = tgis.SpaceTimeRasterDataset(upper_id)
        if upper_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found")
                % (upper_strds.get_id()))

        if upper_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and upper strds must be equal")
            )

        upper_strds.select(dbif)

    input_strds_start, input_strds_end = \
        input_strds.get_temporal_extent_as_tuple()

    # Parse start/stop according to the dataset's temporal type
    if input_strds.is_time_absolute():
        start = tgis.string_to_datetime(start)
        if stop:
            stop = tgis.string_to_datetime(stop)
        else:
            stop = input_strds_end
        start = tgis.adjust_datetime_to_granularity(start, granularity)
    else:
        start = int(start)
        if stop:
            stop = int(stop)
        else:
            stop = input_strds_end

    if input_strds.is_time_absolute():
        end = tgis.increment_datetime_by_string(start, cycle)
    else:
        end = start + cycle

    # Relations used to match granules to threshold maps
    limit_relations = [
        "EQUALS", "DURING", "OVERLAPS", "OVERLAPPING", "CONTAINS"
    ]

    count = 1
    output_maps = []

    while input_strds_end > start and stop > start:

        # Make sure that the cyclic computation will stop at the correct time
        if stop and end > stop:
            end = stop

        where = "start_time >= \'%s\' AND start_time < \'%s\'" % (str(start),
                                                                  str(end))
        input_maps = input_strds.get_registered_maps_as_objects(where=where,
                                                                dbif=dbif)

        grass.message(_("Processing cycle %s - %s" % (str(start), str(end))))

        # NOTE(review): this ``continue`` skips the start/end increment at
        # the bottom of the loop, so an empty cycle repeats forever — verify.
        if len(input_maps) == 0:
            continue

        # Lets create a dummy list of maps with granularity conform intervals
        gran_list = []
        # Lists to compute the topology with upper and lower datasets
        gran_list_low = []
        gran_list_up = []
        gran_start = start
        while gran_start < end:
            map = input_strds.get_new_map_instance("%i@%i" % (count, count))
            if input_strds.is_time_absolute():
                gran_end = tgis.increment_datetime_by_string(gran_start,
                                                             granularity)
                map.set_absolute_time(gran_start, gran_end)
                gran_start = tgis.increment_datetime_by_string(gran_start,
                                                               granularity)
            else:
                gran_end = gran_start + granularity
                map.set_relative_time(gran_start, gran_end,
                                      input_strds.get_relative_time_unit())
                gran_start = gran_start + granularity

            gran_list.append(copy(map))
            gran_list_low.append(copy(map))
            gran_list_up.append(copy(map))

        # Create the topology between the granularity conform list and all
        # maps of the current cycle
        gran_topo = tgis.SpatioTemporalTopologyBuilder()
        gran_topo.build(gran_list, input_maps)

        if lower:
            lower_maps = lower_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_lower_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_lower_topo.build(gran_list_low, lower_maps)

        if upper:
            upper_maps = upper_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_upper_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_upper_topo.build(gran_list_up, upper_maps)

        old_map_name = None

        # Aggregate
        num_maps = len(gran_list)
        for i in range(num_maps):
            if reverse:
                map = gran_list[num_maps - i - 1]
            else:
                map = gran_list[i]

            # Select input maps based on temporal topology relations
            input_maps = []
            if map.get_equal():
                input_maps += map.get_equal()
            elif map.get_contains():
                input_maps += map.get_contains()
            elif map.get_overlaps():
                input_maps += map.get_overlaps()
            elif map.get_overlapped():
                input_maps += map.get_overlapped()
            elif map.get_during():
                input_maps += map.get_during()

            # Check input maps
            if len(input_maps) == 0:
                continue

            # New output map
            if input_strds.get_temporal_type() == 'absolute' and \
               time_suffix == 'gran':
                suffix = tgis.create_suffix_from_datetime(
                    map.temporal_extent.get_start_time(),
                    input_strds.get_granularity())
                output_map_name = "{ba}_{su}".format(ba=base, su=suffix)
            elif input_strds.get_temporal_type() == 'absolute' and \
                 time_suffix == 'time':
                suffix = tgis.create_time_suffix(map)
                output_map_name = "{ba}_{su}".format(ba=base, su=suffix)
            else:
                output_map_name = tgis.create_numeric_suffix(
                    base, count, time_suffix)

            output_map_id = map.build_id(output_map_name, mapset)
            output_map = input_strds.get_new_map_instance(output_map_id)

            # Check if new map is in the temporal database
            if output_map.is_in_db(dbif):
                if grass.overwrite():
                    # Remove the existing temporal database entry
                    output_map.delete(dbif)
                    output_map = input_strds.get_new_map_instance(
                        output_map_id)
                else:
                    grass.fatal(
                        _("Map <%s> is already registered in the temporal"
                          " database, use overwrite flag to overwrite.")
                        % (output_map.get_map_id()))

            map_start, map_end = map.get_temporal_extent_as_tuple()

            if map.is_time_absolute():
                output_map.set_absolute_time(map_start, map_end)
            else:
                output_map.set_relative_time(map_start, map_end,
                                             map.get_relative_time_unit())

            limits_vals = limits.split(",")
            limits_lower = float(limits_vals[0])
            limits_upper = float(limits_vals[1])

            # Find the lower threshold map temporally related to this granule
            lower_map_name = None
            if lower:
                relations = gran_list_low[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        lower_map_name = str(relations[relation][0].get_id())
                        break

            # Find the upper threshold map temporally related to this granule
            upper_map_name = None
            if upper:
                relations = gran_list_up[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        upper_map_name = str(relations[relation][0].get_id())
                        break

            input_map_names = []
            for input_map in input_maps:
                input_map_names.append(input_map.get_id())

            # Set up the module
            accmod = Module("r.series.accumulate", input=input_map_names,
                            output=output_map_name, run_=False)

            # Chain onto the previous granule's result
            if old_map_name:
                accmod.inputs["basemap"].value = old_map_name
            if lower_map_name:
                accmod.inputs["lower"].value = lower_map_name
            if upper_map_name:
                accmod.inputs["upper"].value = upper_map_name

            accmod.inputs["limits"].value = (limits_lower, limits_upper)

            if shift:
                accmod.inputs["shift"].value = float(shift)

            if scale:
                accmod.inputs["scale"].value = float(scale)

            if method:
                accmod.inputs["method"].value = method

            print(accmod)
            accmod.run()

            if accmod.popen.returncode != 0:
                dbif.close()
                grass.fatal(_("Error running r.series.accumulate"))

            output_maps.append(output_map)
            old_map_name = output_map_name
            count += 1

        # Increment the cycle
        start = end
        if input_strds.is_time_absolute():
            start = end
            if offset:
                start = tgis.increment_datetime_by_string(end, offset)
            end = tgis.increment_datetime_by_string(start, cycle)
        else:
            if offset:
                start = end + offset
            end = start + cycle

    # Insert the maps into the output space time dataset
    if output_strds.is_in_db(dbif):
        if grass.overwrite():
            output_strds.delete(dbif)
            output_strds = input_strds.get_new_instance(out_id)

    temporal_type, semantic_type, title, description = \
        input_strds.get_initial_values()
    output_strds.set_initial_values(temporal_type, semantic_type, title,
                                    description)
    output_strds.insert(dbif)

    empty_maps = []

    # Register the maps in the database
    count = 0
    for output_map in output_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, len(output_maps), 1)

        # Read the raster map data
        output_map.load()

        # In case of a empty map continue, do not register empty maps
        if not register_null:
            if output_map.metadata.get_min() is None and \
               output_map.metadata.get_max() is None:
                empty_maps.append(output_map)
                continue

        # Insert map in temporal database
        output_map.insert(dbif)
        output_strds.register_map(output_map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    output_strds.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    dbif.close()

    # Remove empty maps
    if len(empty_maps) > 0:
        for map in empty_maps:
            grass.run_command("g.remove", flags='f', type="raster",
                              name=map.get_name(), quiet=True)
def main(options, flags):
    """Aggregate strds values sampled at vector features per date.

    For every date (from the ``date`` option or the distinct values of
    ``date_column``), samples the strds at the vector features with
    t.rast.what, aggregates the values with the requested methods and either
    prints the results or updates the vector attribute table.
    """
    import grass.pygrass.modules as pymod
    import grass.temporal as tgis
    from grass.pygrass.vector import VectorTopo

    invect = options["input"]
    if invect.find('@') != -1:
        invect = invect.split('@')[0]
    incol = options["date_column"]
    indate = options["date"]
    strds = options["strds"]
    if strds.find('@') != -1:
        strds_name = strds.split('@')[0]
    else:
        strds_name = strds
    output = options["output"]
    cols = options["columns"].split(',')
    mets = options["method"].split(',')
    gran = options["granularity"]
    dateformat = options["date_format"]
    separator = gscript.separator(options["separator"])

    stdout = False
    if output != '-' and flags['u']:
        gscript.fatal(_("Cannot combine 'output' option and 'u' flag"))
    elif output != '-' and flags['c']:
        gscript.fatal(_("Cannot combine 'output' option and 'c' flag"))
    elif output == '-' and (flags['u'] or flags['c']):
        output = invect
        gscript.warning(_("Attribute table of vector {name} will be updated"
                          "...").format(name=invect))
    else:
        stdout = True

    # Create one result column per method when requested
    if flags['c']:
        cols = []
        for m in mets:
            colname = "{st}_{me}".format(st=strds_name, me=m)
            cols.append(colname)
            try:
                pymod.Module("v.db.addcolumn", map=invect, columns="{col} "
                             "double precision".format(col=colname))
            except CalledModuleError:
                gscript.fatal(_("Not possible to create column "
                                "{col}".format(col=colname)))

    if output != '-' and len(cols) != len(mets):
        gscript.fatal(_("'columns' and 'method' options must have the same "
                        "number of elements"))

    tgis.init()
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()
    sp = tgis.open_old_stds(strds, "strds", dbif)

    # Turn the requested granularity into a time window length
    if sp.get_temporal_type() == 'absolute':
        delta = int(tgis.gran_to_gran(gran, sp.get_granularity(), True))
        if tgis.gran_singular_unit(gran) in ['year', 'month']:
            delta = int(tgis.gran_to_gran(gran, '1 day', True))
            td = timedelta(delta)
        elif tgis.gran_singular_unit(gran) == 'day':
            delta = tgis.gran_to_gran(gran, sp.get_granularity(), True)
            td = timedelta(delta)
        elif tgis.gran_singular_unit(gran) == 'hour':
            td = timedelta(hours=delta)
        elif tgis.gran_singular_unit(gran) == 'minute':
            td = timedelta(minutes=delta)
        elif tgis.gran_singular_unit(gran) == 'second':
            td = timedelta(seconds=delta)
    else:
        if sp.get_granularity() >= int(gran):
            gscript.fatal(_("Input granularity is smaller or equal to the {iv}"
                            " STRDS granularity".format(iv=strds)))
        td = int(gran)

    # Collect the dates to process
    if incol and indate:
        gscript.fatal(_("Cannot combine 'date_column' and 'date' options"))
    elif not incol and not indate:
        gscript.fatal(_("You have to fill 'date_column' or 'date' option"))
    elif incol:
        try:
            dates = pymod.Module("db.select", flags='c', stdout_=PI,
                                 stderr_=PI, sql="SELECT DISTINCT {dc} from "
                                 "{vmap} order by {dc}".format(vmap=invect,
                                                               dc=incol))
            mydates = dates.outputs["stdout"].value.splitlines()
        except CalledModuleError:
            gscript.fatal(_("db.select return an error"))
    elif indate:
        mydates = [indate]

    # Make sure the vector has an attribute table
    pymap = VectorTopo(invect)
    pymap.open('r')
    if len(pymap.dblinks) == 0:
        try:
            pymap.close()
            pymod.Module("v.db.addtable", map=invect)
        except CalledModuleError:
            dbif.close()
            # BUG FIX: the message had two %s placeholders but a single
            # argument, raising TypeError instead of the intended error
            gscript.fatal(_("Unable to add table to vector map "
                            "<%s>") % invect)
    if pymap.is_open():
        pymap.close()

    qfeat = pymod.Module("v.category", stdout_=PI, stderr_=PI,
                         input=invect, option='print')
    myfeats = qfeat.outputs["stdout"].value.splitlines()

    if stdout:
        outtxt = ''
    for data in mydates:
        if sp.get_temporal_type() == 'absolute':
            fdata = datetime.strptime(data, dateformat)
        else:
            fdata = int(data)
        # 'a' flag: aggregate the window after the date, otherwise before
        if flags['a']:
            sdata = fdata + td
            mwhere = "start_time >= '{inn}' and end_time < " \
                     "'{out}'".format(inn=fdata, out=sdata)
        else:
            sdata = fdata - td
            mwhere = "start_time >= '{inn}' and end_time < " \
                     "'{out}'".format(inn=sdata, out=fdata)
        lines = None
        try:
            r_what = pymod.Module("t.rast.what", points=invect, strds=strds,
                                  layout='timerow', separator=separator,
                                  flags="v", where=mwhere, quiet=True,
                                  stdout_=PI, stderr_=PI)
            lines = r_what.outputs["stdout"].value.splitlines()
        except CalledModuleError:
            pass
        if incol:
            try:
                qfeat = pymod.Module("db.select", flags='c', stdout_=PI,
                                     stderr_=PI,
                                     sql="SELECT DISTINCT cat from"
                                     " {vmap} where {dc}='{da}' order by "
                                     "cat".format(vmap=invect, da=data,
                                                  dc=incol))
                myfeats = qfeat.outputs["stdout"].value.splitlines()
            except CalledModuleError:
                gscript.fatal(_("db.select returned an error for date "
                                "{da}".format(da=data)))
        # No sampled values: print '*' placeholders for every feature
        if not lines and stdout:
            for feat in myfeats:
                outtxt += "{di}{sep}{da}".format(di=feat, da=data,
                                                 sep=separator)
                for n in range(len(mets)):
                    outtxt += "{sep}{val}".format(val='*', sep=separator)
                outtxt += "\n"
        if not lines:
            continue
        x = 0
        for line in lines:
            vals = line.split(separator)
            if vals[0] in myfeats:
                try:
                    # BUG FIX: np.float was removed from NumPy; the builtin
                    # float is the documented replacement
                    nvals = np.array(vals[4:]).astype(float)
                except ValueError:
                    # Non-numeric values: emit '*' placeholders
                    if stdout:
                        outtxt += "{di}{sep}{da}".format(di=vals[0],
                                                         da=data,
                                                         sep=separator)
                        for n in range(len(mets)):
                            outtxt += "{sep}{val}".format(val='*',
                                                          sep=separator)
                        outtxt += "\n"
                    continue
                if stdout:
                    outtxt += "{di}{sep}{da}".format(di=vals[0], da=data,
                                                     sep=separator)
                for n in range(len(mets)):
                    result = return_value(nvals, mets[n])
                    if stdout:
                        outtxt += "{sep}{val}".format(val=result,
                                                      sep=separator)
                    else:
                        try:
                            if incol:
                                pymod.Module("v.db.update", map=output,
                                             column=cols[n],
                                             value=str(result),
                                             where="{dc}='{da}' AND cat="
                                             "{ca}".format(da=data,
                                                           ca=vals[0],
                                                           dc=incol))
                            else:
                                pymod.Module("v.db.update", map=output,
                                             column=cols[n],
                                             value=str(result),
                                             where="cat={ca}".format(
                                                 ca=vals[0]))
                        except CalledModuleError:
                            gscript.fatal(_("v.db.update return an error"))
                if stdout:
                    outtxt += "\n"
            if x == len(myfeats):
                break
            else:
                x += 1
    if stdout:
        print(outtxt)
def main():
    """Run r.hants on every map of the input space time raster dataset
    and register the smoothed "_hants" result maps in a new STRDS.

    Reads module options/flags from the enclosing script's globals.
    """
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    register_null = flags["n"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Validate the output name early (honours --overwrite)
    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif,
                                 overwrite=overwrite)

    # Configure the HANTS module
    hants_flags = ""
    if flags["l"]:
        hants_flags = hants_flags + 'l'
    if flags["h"]:
        hants_flags = hants_flags + 'h'
    if flags["i"]:
        hants_flags = hants_flags + 'i'

    kwargs = dict()
    kwargs['nf'] = options['nf']
    if options['fet']:
        kwargs['fet'] = options['fet']
    kwargs['dod'] = options['dod']
    if options['range']:
        kwargs['range'] = options['range']
    kwargs['suffix'] = "_hants"
    if len(hants_flags) > 0:
        kwargs['flags'] = hants_flags

    num_maps = len(maps)
    new_maps = []

    maplistfile = script.tempfile()
    # BUGFIX: the handle was opened as 'fd' but written/closed via the
    # undefined name 'f' (NameError at runtime); a context manager also
    # guarantees the file is flushed before r.hants reads it.
    with open(maplistfile, 'w') as listfile:
        # create list of input maps and their time stamps
        for map in maps:
            map_name = "{ba}_hants".format(ba=map.get_id())
            new_map = tgis.open_new_map_dataset(
                map_name, None, type="raster",
                temporal_extent=map.get_temporal_extent(),
                overwrite=overwrite, dbif=dbif)
            new_maps.append(new_map)
            listfile.write("{0}\n".format(map.get_id()))

    # run r.hants
    grass.run_command('r.hants', file=maplistfile, suffix="_hants",
                      quiet=True, **kwargs)

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr,
                                stype, dbif, overwrite)

    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
           map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ",".join(map.get_name() for map in empty_maps)
        grass.run_command("g.remove", flags='f', type='raster',
                          name=names, quiet=True)

    dbif.close()
new.put_row(newrow) ################################################################################ ################################################################################ # importamos las librerias para el procesamiento from grass.pygrass.raster import RasterRow import matplotlib.pyplot as plt import grass.temporal as tgis from datetime import datetime # realizamos la conexion con la base de datos temporal tgis.init() dbif = tgis.SQLDatabaseInterfaceConnection() dbif.connect() # creamos el strds que debemos rellenar SPI_RF = 'spi_rf' dataset = tgis.open_new_stds(name=SPI_RF, type='strds', temporaltype='absolute', title="SPI RF", descr="SPI predicho por RF", semantic='mean', overwrite=True) dataset_name_rf = 'spi_rf@PERMANENT' dataset = tgis.open_old_stds(dataset_name_rf, "strds", dbif=dbif)
def main():
    """Modify the metadata (title, description, semantic type,
    aggregation type) of a space time dataset and optionally
    re-synchronise it with its registered maps.

    Flags (from the enclosing script's globals):
      -u  recompute dataset metadata/extents from the registered maps
      -m  additionally refresh each map's metadata from the spatial DB
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    name = options["input"]
    type = options["type"]
    title = options["title"]
    aggr_type = options["aggr_type"]
    description = options["description"]
    semantic = options["semantictype"]
    # BUGFIX: the -u flag was stored in 'update' and then immediately
    # clobbered by 'update = False' below, so it never had any effect.
    # Keep it in its own variable and honour it in the final condition.
    stats_update = flags["u"]
    map_update = flags["m"]

    # Make sure the temporal database exists
    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds = tgis.open_old_stds(name, type, dbif)

    # 'update' tracks whether any metadata field actually changed
    update = False
    if aggr_type and type == "stvds":
        # aggregation type is meaningless for vector datasets
        dbif.close()
        return ()

    if aggr_type and type != "stvds":
        stds.metadata.set_aggregation_type(aggregation_type=aggr_type)
        update = True
    if title:
        stds.metadata.set_title(title=title)
        update = True
        # Update only non-null entries
    if description:
        stds.metadata.set_description(description=description)
        update = True
    if semantic:
        stds.base.set_semantic_type(semantic_type=semantic)
        update = True

    if update:
        stds.update(dbif=dbif)

    if map_update:
        # Update the registered maps from the grass spatial database
        statement = ""
        # This dict stores the datasets that must be updated
        dataset_dict = {}

        count = 0
        maps = stds.get_registered_maps_as_objects(dbif=dbif)

        # We collect the delete and update statements
        for map in maps:
            count += 1
            if count % 10 == 0:
                grass.percent(count, len(maps), 1)

            map.select(dbif=dbif)

            # Check if the map is present in the grass spatial database
            # Update if present, delete if not present
            if map.map_exists():
                # Read new metadata from the spatial database
                map.load()
                statement += map.update(dbif=dbif, execute=False)
            else:
                # Delete the map from the temporal database
                # We need to update all effected space time datasets
                datasets = map.get_registered_stds(dbif)
                if datasets:
                    for dataset in datasets:
                        dataset_dict[dataset] = dataset
                # Collect the delete statements
                statement += map.delete(dbif=dbif, update=False,
                                        execute=False)

        # Execute the collected SQL statements
        dbif.execute_transaction(statement)

        # Update the effected space time datasets
        for id in dataset_dict:
            stds_new = stds.get_new_instance(id)
            stds_new.select(dbif=dbif)
            stds_new.update_from_registered_maps(dbif=dbif)

    if map_update or update or stats_update:
        stds.update_from_registered_maps(dbif=dbif)
        stds.update_command_string(dbif=dbif)

    dbif.close()
def main(options, flags):
    """Sample a space time raster dataset at point positions (vector
    points or explicit coordinates) and write one line per raster map:
    start_time, end_time, then one value per point.

    :param options: parsed module options (points, strds, output, ...)
    :param flags: parsed module flags (n: header, r: raster region)
    """
    # Get the options
    points = options["points"]
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    order = options["order"]
    column = options["column"]
    separator = options["separator"]
    coordinates = options["coordinates"]

    # Setup separator
    if separator == "pipe":
        separator = "|"
    if separator == "comma":
        separator = ","
    if separator == "space":
        separator = " "
    if separator == "tab":
        separator = "\t"
    if separator == "newline":
        separator = "\n"

    use_cats = False

    write_header = flags["n"]
    use_raster_region = flags["r"]

    overwrite = gscript.overwrite()

    if points and coordinates:
        gscript.fatal(_("points and coordinates are mutually exclusive"))

    if not points and not coordinates:
        gscript.fatal(_("You must specify points or coordinates"))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, order=order,
                                             dbif=dbif)
    dbif.close()
    if not maps:
        gscript.fatal(_("Space time raster dataset <%s> is empty") % sp.get_id())

    # The list of sample points
    p_list = []

    if not coordinates:
        # Check if the chosen header column is in the vector map
        vname = points
        vmapset = ""
        if "@" in points:
            vname, vmapset = points.split("@")

        v = pyvect.VectorTopo(vname, vmapset)
        # BUGFIX: check for existence before opening; opening a missing
        # map would raise instead of emitting the intended fatal error.
        if v.exist() is False:
            gscript.fatal(_("Vector map <%s> does not exist" % (points)))
        v.open("r")

        col_index = 0

        if not v.table:
            use_cats = True
            gscript.warning(_("Vector map <%s> does not have an attribute table, using cats as header column." % (points)))

        if v.table and column not in v.table.columns:
            gscript.fatal(_("Vector map <%s> has no column named %s" % (points, column)))

        if use_cats is False:
            col_index = list(v.table.columns.names()).index(column)

        # Create the point list
        for line in v:
            if line.gtype == libvect.GV_POINT:
                if use_cats is False:
                    p = SamplePoint(line.x, line.y, line.cat,
                                    line.attrs.values()[col_index])
                elif use_cats is True:
                    p = SamplePoint(line.x, line.y, line.cat)
                p_list.append(p)
        v.close()
    else:
        # Convert the comma separated coordinate pairs into sample points
        coord_list = coordinates.split(",")
        use_cats = True

        count = 0
        cat = 1
        while count < len(coord_list):
            x = coord_list[count]
            count += 1
            y = coord_list[count]
            count += 1
            p = SamplePoint(float(x), float(y), cat)
            p_list.append(p)
            cat += 1

    if output:
        out_file = open(output, "w")
    else:
        out_file = sys.stdout

    # Write the header
    if write_header:
        out_file.write("start_time")
        out_file.write(separator)
        out_file.write("end_time")
        out_file.write(separator)
        count = 0
        for p in p_list:
            count += 1
            if use_cats is True:
                out_file.write(str(p.cat))
            else:
                out_file.write(str(p.column))
            if count != len(p_list):
                out_file.write(separator)
        out_file.write("\n")

    # Sorting the points by y-coordinate to make use of the single row
    # cache and read direction
    sorted_p_list = sorted(p_list, key=SamplePointComparisonY)

    # Sample the raster maps
    num = 0
    for map in maps:
        num += 1
        sys.stderr.write("Sample map <%s> number %i out of %i\n"
                         % (map.get_name(), num, len(maps)))

        start, end = map.get_temporal_extent_as_tuple()
        out_file.write(str(start))
        out_file.write(separator)
        # Point maps (no end time) repeat the start time
        if not end:
            out_file.write(str(start))
        else:
            out_file.write(str(end))
        out_file.write(separator)

        r = pyrast.RasterRow(map.get_name(), map.get_mapset())
        if r.exist() is False:
            gscript.fatal(_("Raster map <%s> does not exist" % (map.get_id())))

        region = None
        if use_raster_region is True:
            r.set_region_from_rast()
            region = pyregion.Region()
            region.from_rast(map.get_name())
        # Open the raster layer after the region settings
        r.open("r")

        # Sample the raster maps with the sorted points
        for p in sorted_p_list:
            p.value = r.get_value(point=p, region=region)

        # Write the point values from the original list
        count = 0
        for p in p_list:
            count += 1
            out_file.write(str(p.value))
            if count != len(p_list):
                out_file.write(separator)
        out_file.write("\n")
        r.close()

    # BUGFIX: only close the stream when we actually opened a file;
    # the old unconditional close() shut down sys.stdout.
    if output:
        out_file.close()
def main():
    """Export rasters (given directly or via a STRDS) as reprojected PNG
    overlays plus a CSV index and a Leaflet layerInfos JavaScript file.
    """
    options, flags = gs.parser()

    # it does not check if pngs and other files exists,
    # maybe it could check the any/all file(s) dir
    if options['raster'] and options['strds']:
        gs.fatal(_("Options raster and strds cannot be specified together."
                   " Please decide for one of them."))
    if options['raster'] and options['where']:
        gs.fatal(_("Option where cannot be combined with the option raster."
                   " Please don't set where option or use strds option"
                   " instead of raster option."))
    if options['raster']:
        if ',' in options['raster']:
            maps = options['raster'].split(',')  # TODO: skip empty parts
        else:
            maps = [options['raster']]
    elif options['strds']:
        # import and init only when needed
        # init is called anyway when the generated form is used
        import grass.temporal as tgis

        strds = options['strds']
        where = options['where']

        # make sure the temporal database exists
        tgis.init()

        # create the space time raster object
        ds = tgis.open_old_space_time_dataset(strds, 'strds')
        # check if the dataset is in the temporal database
        if not ds.is_in_db():
            gs.fatal(_("Space time dataset <%s> not found") % strds)

        # we need a database interface
        dbiface = tgis.SQLDatabaseInterfaceConnection()
        dbiface.connect()

        # the query
        rows = ds.get_registered_maps(columns='id', where=where,
                                      order='start_time')

        if not rows:
            gs.fatal(_("Cannot get any maps for spatio-temporal raster"
                       " dataset <%s>."
                       " Dataset is empty or you temporal WHERE"
                       " condition filtered all maps out."
                       " Please, specify another dataset,"
                       " put maps into this dataset"
                       " or correct your WHERE condition.") % strds)
        maps = [row['id'] for row in rows]
    else:
        gs.fatal(_("Either raster or strds option must be specified."
                   " Please specify one of them."))
    # get the number of maps for later use
    num_maps = len(maps)

    out_dir = options['output']
    if not os.path.exists(out_dir):
        # TODO: maybe we could create the last dir on specified path?
        gs.fatal(_("Output path <%s> does not exists."
                   " You need to create the (empty) output directory"
                   " yourself before running this module.") % out_dir)
    epsg = int(options['epsg'])

    if ',' in options['opacity']:
        opacities = [float(opacity)
                     for opacity in options['opacity'].split(',')]
        if len(opacities) != num_maps:
            gs.fatal(_("Number of opacities <{no}> does not match number"
                       " of maps <{nm}>.").format(no=len(opacities),
                                                  nm=num_maps))
    else:
        opacities = [float(options['opacity'])] * num_maps

    if ',' in options['info']:
        infos = options['info'].split(',')
    else:
        infos = [options['info']]

    if 'geotiff' in infos and not gs.find_program('r.out.tiff', '--help'):
        gs.fatal(_("Install r.out.tiff add-on module to export GeoTIFF"))

    # r.out.png options
    compression = int(options['compression'])
    # flag w is passed to r.out.png.proj
    # our flag n is inversion of r.out.png.proj's t flag
    # (transparent NULLs are better for overlay)
    # we always need the l flag (ll .wgs84 file)
    routpng_flags = ''
    if not flags['n']:
        routpng_flags += 't'
    if flags['w']:
        routpng_flags += 'w'
    # r.out.png.proj l flag for LL .wgs84 file is now function parameter
    # and is specified bellow

    if flags['m']:
        use_region = False
        # we will use map extent
        gs.use_temp_region()
    else:
        use_region = True

    # hard coded file names
    data_file_name = 'data_file.csv'
    js_data_file_name = 'data_file.js'

    data_file = open(os.path.join(out_dir, data_file_name), 'w')
    js_data_file = open(os.path.join(out_dir, js_data_file_name), 'w')
    js_data_file.write('/* This file was generated by r.out.leaflet GRASS GIS'
                       ' module. */\n\n')
    js_data_file.write('var layerInfos = [\n')

    for i, map_name in enumerate(maps):
        if not use_region:
            gs.run_command('g.region', rast=map_name)
        if '@' in map_name:
            pure_map_name = map_name.split('@')[0]
        else:
            pure_map_name = map_name
        # TODO: mixing current and map's mapset at this point
        if '@' in map_name:
            map_name, src_mapset_name = map_name.split('@')
        else:
            # TODO: maybe mapset is mandatory for those out of current mapset?
            src_mapset_name = gs.gisenv()['MAPSET']
        image_file_name = pure_map_name + '.png'
        image_file_path = os.path.join(out_dir, image_file_name)
        # TODO: skip writing to file and extract the information from
        # function, or use object if function is so large
        wgs84_file = image_file_path + '.wgs84'
        # NOTE(review): use_region=True is passed unconditionally here; with
        # -m the per-map region has already been set by g.region above, so
        # the export always honours the current region -- confirm intended.
        export_png_in_projection(map_name=map_name,
                                 src_mapset_name=src_mapset_name,
                                 output_file=image_file_path,
                                 epsg_code=epsg,
                                 compression=compression,
                                 routpng_flags=routpng_flags,
                                 wgs84_file=wgs84_file,
                                 use_region=True)

        data_file.write(pure_map_name + ',' + image_file_name + '\n')

        # it doesn't matter in which location we are, it just uses the current
        # location, not tested for LL loc, assuming that to be nop.
        map_extent = get_map_extent_for_file(wgs84_file)
        bounds = map_extent_to_js_leaflet_list(map_extent)

        extra_attributes = []
        generate_infos(map_name=map_name,
                       projected_png_file=image_file_path,
                       required_infos=infos,
                       output_directory=out_dir,
                       attributes=extra_attributes)
        # http://www.w3schools.com/js/js_objects.asp
        js_data_file.write(""" {{title: "{title}", file: "{file_}","""
                           """ bounds: {bounds}, opacity: {opacity}"""
                           .format(title=pure_map_name,
                                   file_=image_file_name,
                                   bounds=bounds,
                                   opacity=opacities[i]))
        if extra_attributes:
            extra_js_attributes = [pair[0] + ': "' +
                                   escape_quotes(escape_endlines(
                                       escape_backslashes(pair[1]))) +
                                   '"' for pair in extra_attributes]
            js_data_file.write(', ' + ', '.join(extra_js_attributes))
        js_data_file.write("""}\n""")
        # do not write after the last item
        if i < num_maps - 1:
            js_data_file.write(',')
    js_data_file.write('];\n')
    data_file.close()
    # BUGFIX: the JavaScript index file was never closed, risking a
    # truncated data_file.js on interpreter teardown.
    js_data_file.close()
def main(options, flags):
    """Query a space time raster dataset at point positions with r.what,
    distributing batches of at most 400 maps over parallel processes and
    assembling the per-batch output files in the requested layout.

    :param options: parsed module options (points/coordinates/strds/...)
    :param flags: parsed module flags (n: header, i: read stdin)
    """
    # Get the options
    points = options["points"]
    coordinates = options["coordinates"]
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    order = options["order"]
    layout = options["layout"]
    null_value = options["null_value"]
    separator = options["separator"]

    nprocs = int(options["nprocs"])
    write_header = flags["n"]
    use_stdin = flags["i"]
    #output_cat_label = flags["f"]
    #output_color = flags["r"]
    #output_cat = flags["i"]

    overwrite = gscript.overwrite()

    if coordinates and points:
        gscript.fatal(_("Options coordinates and points are mutually exclusive"))

    if not coordinates and not points and not use_stdin:
        gscript.fatal(_("Please specify the coordinates, the points option or use the 's' option to pipe coordinate positions to t.rast.what from stdin, to provide the sampling coordinates"))

    # BUGFIX: site_input was only assigned inside the use_stdin branch but
    # is read unconditionally by the output writers at the end.
    site_input = False
    if use_stdin:
        coordinates_stdin = str(sys.__stdin__.read())
        # Check if coordinates are given with site names or IDs
        stdin_length = len(coordinates_stdin.split('\n')[0].split())
        if stdin_length >= 3:
            site_input = True

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, order=order,
                                             dbif=dbif)
    dbif.close()
    if not maps:
        gscript.fatal(_("Space time raster dataset <%s> is empty") % sp.get_id())

    # Setup separator
    if separator == "pipe":
        separator = "|"
    if separator == "comma":
        separator = ","
    if separator == "space":
        separator = " "
    if separator == "tab":
        separator = "\t"
    if separator == "newline":
        separator = "\n"

    # Setup flags are disabled due to test issues
    # (renamed from 'flags' to avoid shadowing the function parameter)
    r_what_flags = ""
    #if output_cat_label is True:
    #    r_what_flags += "f"
    #if output_color is True:
    #    r_what_flags += "r"
    #if output_cat is True:
    #    r_what_flags += "i"

    # Configure the r.what module
    if points:
        r_what = pymod.Module("r.what", map="dummy", output="dummy",
                              run_=False, separator=separator,
                              points=points, overwrite=overwrite,
                              flags=r_what_flags, quiet=True)
    elif coordinates:
        # Create a list of values
        coord_list = coordinates.split(",")
        r_what = pymod.Module("r.what", map="dummy", output="dummy",
                              run_=False, separator=separator,
                              coordinates=coord_list, overwrite=overwrite,
                              flags=r_what_flags, quiet=True)
    elif use_stdin:
        r_what = pymod.Module("r.what", map="dummy", output="dummy",
                              run_=False, separator=separator,
                              stdin_=coordinates_stdin, overwrite=overwrite,
                              flags=r_what_flags, quiet=True)
    else:
        grass.error(_("Please specify points or coordinates"))

    if len(maps) < nprocs:
        nprocs = len(maps)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))
    num_maps = len(maps)

    # 400 Maps is the absolute maximum in r.what
    # We need to determine the number of maps that can be processed
    # in parallel

    # First estimate the number of maps per process. We use 400 maps
    # simultaneously as maximum for a single process
    num_loops = int(num_maps / (400 * nprocs))
    remaining_maps = num_maps % (400 * nprocs)

    if num_loops == 0:
        num_loops = 1
        remaining_maps = 0

    # Compute the number of maps for each process
    maps_per_loop = int((num_maps - remaining_maps) / num_loops)
    maps_per_process = int(maps_per_loop / nprocs)
    remaining_maps_per_loop = maps_per_loop % nprocs

    # We put the output files in an ordered list
    output_files = []
    output_time_list = []

    count = 0
    for loop in range(num_loops):
        file_name = gscript.tempfile() + "_%i" % (loop)
        count = process_loop(nprocs, maps, file_name, count,
                             maps_per_process, remaining_maps_per_loop,
                             output_files, output_time_list, r_what,
                             process_queue)

    process_queue.wait()

    gscript.verbose("Number of raster map layers remaining for sampling %i" % (remaining_maps))
    if remaining_maps > 0:
        # BUGFIX: the former <=100 shortcut referenced the undefined name
        # 'map_names' and a stale 'file_name' (NameError at runtime) and
        # never recorded its output file/time list.  Route all remaining
        # maps through process_loop; a single worker suffices for few maps.
        procs = 1 if remaining_maps <= 100 else nprocs
        maps_per_process = int(remaining_maps / procs)
        remaining_maps_per_loop = remaining_maps % procs

        file_name = "out_remain"
        process_loop(procs, maps, file_name, count, maps_per_process,
                     remaining_maps_per_loop, output_files,
                     output_time_list, r_what, process_queue)

    # Wait for unfinished processes
    process_queue.wait()

    # Out the output files in the correct order together
    if layout == "row":
        one_point_per_row_output(separator, output_files, output_time_list,
                                 output, write_header, site_input)
    elif layout == "col":
        one_point_per_col_output(separator, output_files, output_time_list,
                                 output, write_header, site_input)
    else:
        one_point_per_timerow_output(separator, output_files,
                                     output_time_list, output,
                                     write_header, site_input)
def main():
    """Sample a space time raster dataset (strds) with a space time
    vector dataset (stvds): for every registered vector map, collect the
    raster maps overlapping its time interval, optionally aggregate them
    into one temporary raster, and upload the raster values into the
    vector attribute table via v.what.rast.

    Reads module options from the enclosing script's globals.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    strds = options["strds"]
    where = options["where"]
    column = options["column"]
    method = options["method"]
    tempwhere = options["t_where"]
    sampling = options["sampling"]

    # Normalize blank-ish WHERE to "no filter"
    if where == "" or where == " " or where == "\n":
        where = None

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "stvds", dbif)
    strds_sp = tgis.open_old_stds(strds, "strds", dbif)

    # Both datasets must be either absolute-time or relative-time
    if strds_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        grass.fatal(_("Input and aggregation dataset must "
                      "have the same temporal type"))

    # Check if intervals are present in the sample dataset
    if sp.get_temporal_type() == "absolute":
        map_time = sp.absolute_time.get_map_time()
    else:
        map_time = sp.relative_time.get_map_time()

    if map_time != "interval":
        dbif.close()
        grass.fatal(_("All registered maps of the space time vector "
                      "dataset must have time intervals"))

    rows = sp.get_registered_maps("name,layer,mapset,start_time,end_time",
                                  tempwhere, "start_time", dbif)

    if not rows:
        dbif.close()
        grass.fatal(_("Space time vector dataset <%s> is empty") % sp.get_id())

    # Sample the raster dataset with the vector dataset and run v.what.rast
    for row in rows:
        start = row["start_time"]
        end = row["end_time"]
        vectmap = row["name"] + "@" + row["mapset"]
        layer = row["layer"]

        # Raster maps of the strds that overlap [start, end)
        raster_maps = tgis.collect_map_names(strds_sp, dbif, start, end,
                                             sampling)

        aggreagated_map_name = None

        if raster_maps:
            # Aggregation
            if method != "disabled" and len(raster_maps) > 1:
                # Generate the temporary map name
                # (sic: "aggreagated" is the historical spelling)
                aggreagated_map_name = "aggreagated_map_name_" + \
                    str(os.getpid())
                new_map = tgis.aggregate_raster_maps(raster_maps,
                                                     aggreagated_map_name,
                                                     start, end, 0, method,
                                                     False, dbif)
                # NOTE(review): aggregate_raster_maps presumably appends a
                # "_0" suffix to the map it creates -- confirm against the
                # temporal framework before changing this.
                aggreagated_map_name = aggreagated_map_name + "_0"
                if new_map is None:
                    continue
                # We overwrite the raster_maps list
                raster_maps = (new_map.get_id(), )

            for rastermap in raster_maps:
                if column:
                    col_name = column
                else:
                    # Create a new column with the SQL compliant
                    # name of the sampled raster map
                    col_name = rastermap.split("@")[0].replace(".", "_")

                coltype = "DOUBLE PRECISION"
                # Get raster type
                rasterinfo = raster.raster_info(rastermap)
                if rasterinfo["datatype"] == "CELL":
                    coltype = "INT"

                try:
                    if layer:
                        grass.run_command("v.db.addcolumn", map=vectmap,
                                          layer=layer,
                                          column="%s %s" % (col_name,
                                                            coltype),
                                          overwrite=grass.overwrite())
                    else:
                        grass.run_command("v.db.addcolumn", map=vectmap,
                                          column="%s %s" % (col_name,
                                                            coltype),
                                          overwrite=grass.overwrite())
                except CalledModuleError:
                    dbif.close()
                    grass.fatal(_("Unable to add column %s to vector map <%s>")
                                % (col_name, vectmap))

                # Call v.what.rast
                try:
                    if layer:
                        grass.run_command("v.what.rast", map=vectmap,
                                          layer=layer, raster=rastermap,
                                          column=col_name, where=where)
                    else:
                        grass.run_command("v.what.rast", map=vectmap,
                                          raster=rastermap,
                                          column=col_name, where=where)
                except CalledModuleError:
                    dbif.close()
                    grass.fatal(_("Unable to run v.what.rast for vector map "
                                  "<%s> and raster map <%s>") % (vectmap,
                                                                 rastermap))

                # Clean up the temporary aggregate
                if aggreagated_map_name:
                    try:
                        grass.run_command("g.remove", flags='f',
                                          type='raster',
                                          name=aggreagated_map_name)
                    except CalledModuleError:
                        dbif.close()
                        grass.fatal(_("Unable to remove raster map <%s>")
                                    % (aggreagated_map_name))

                # Use the first map in case a column names was provided
                if column:
                    break

    dbif.close()
def main():
    """List space time datasets or time stamped maps of the requested
    temporal type(s), writing result rows either to a file or to stdout,
    optionally preceded by a single column-header line.
    """
    # lazy imports
    import grass.temporal as tgis

    # Read the user options
    ds_type = options["type"]
    ttype_spec = options["temporaltype"]
    columns = options["columns"]
    order = options["order"]
    where = options["where"]
    separator = gscript.separator(options["separator"])
    outpath = options["output"]
    colhead = flags['c']

    # The temporal database must exist before any query
    tgis.init()

    template = tgis.dataset_factory(ds_type, None)
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    header_pending = True

    if gscript.verbosity() > 0 and not outpath:
        sys.stderr.write("----------------------------------------------\n")

    outfile = open(outpath, 'w') if outpath else None

    def emit(text):
        # Route one result line either to the output file or to stdout
        if outpath:
            outfile.write("{st}\n".format(st=text))
        else:
            print(text)

    for ttype in ttype_spec.split(","):
        time = "absolute time" if ttype == "absolute" else "relative time"

        stds_list = tgis.get_dataset_list(ds_type, ttype, columns, where,
                                          order, dbif=dbif)

        # Use the correct order of the mapsets, hence first the current
        # mapset, then alphabetic ordering
        mapsets = tgis.get_tgis_c_library_interface().available_mapsets()

        # Print for each mapset separately
        for key in mapsets:
            if key not in stds_list.keys():
                continue
            rows = stds_list[key]
            if not rows:
                continue

            if gscript.verbosity() > 0 and not outpath:
                if issubclass(template.__class__, tgis.AbstractMapDataset):
                    sys.stderr.write(_("Time stamped %s maps with %s available in mapset <%s>:\n") % (template.get_type(), time, key))
                else:
                    sys.stderr.write(_("Space time %s datasets with %s available in mapset <%s>:\n") % (template.get_new_map_instance(None).get_type(), time, key))

            # Emit the column names once, if requested
            if colhead and header_pending:
                emit(separator.join(str(k) for k in rows[0].keys()))
                header_pending = False

            for row in rows:
                emit(separator.join(str(col) for col in row))

    if outpath:
        outfile.close()
    dbif.close()
def main():
    """Merge several space time datasets of the same temporal type into
    one output dataset, skipping maps that are already registered.

    Reads module options from the enclosing script's globals.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    inputs = options["inputs"]
    output = options["output"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    #Get the current mapset to create the id of the space time dataset
    mapset = grass.gisenv()["MAPSET"]

    inputs_split = inputs.split(",")
    input_ids = []

    for input in inputs_split:
        if input.find("@") >= 0:
            input_ids.append(input)
        else:
            input_ids.append(input + "@" + mapset)

    # Set the output name correct
    if output.find("@") >= 0:
        # BUGFIX: output_id was never assigned in this branch, causing a
        # NameError below whenever the user passed name@mapset.
        output_id = output
        out_mapset = output.split("@")[1]
        if out_mapset != mapset:
            grass.fatal(_("Output space time dataset <%s> must be located in this mapset") % (output))
    else:
        output_id = output + "@" + mapset

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds_list = []
    first = None

    for id in input_ids:
        stds = tgis.open_old_stds(id, type, dbif)
        if first is None:
            first = stds

        # All inputs must share the temporal type of the first dataset
        if first.get_temporal_type() != stds.get_temporal_type():
            dbif.close()
            grass.fatal(_("Space time datasets to merge must have the same temporal type"))

        stds_list.append(stds)

    # Do nothing if nothing to merge
    if first is None:
        dbif.close()
        return

    # Check if the new id is in the database
    output_stds = tgis.dataset_factory(type, output_id)
    output_exists = output_stds.is_in_db(dbif=dbif)

    if output_exists is True and grass.overwrite() is False:
        dbif.close()
        grass.fatal(_("Unable to merge maps into space time %s dataset <%s> "
                      "please use the overwrite flag.") %
                    (stds.get_new_map_instance(None).get_type(), output_id))

    if not output_exists:
        output_stds = tgis.open_new_stds(output, type,
                                         first.get_temporal_type(),
                                         "Merged space time dataset",
                                         "Merged space time dataset",
                                         "mean", dbif=dbif,
                                         overwrite=False)
    else:
        output_stds.select(dbif=dbif)

    registered_output_maps = {}
    # Maps that are already registered in an existing dataset
    # are not registered again
    if output_exists is True:
        rows = output_stds.get_registered_maps(columns="id", dbif=dbif)
        if rows:
            for row in rows:
                registered_output_maps[row["id"]] = row["id"]

    for stds in stds_list:
        # Avoid merging of already registered maps
        if stds.get_id() != output_stds.get_id():
            maps = stds.get_registered_maps_as_objects(dbif=dbif)
            if maps:
                for map in maps:
                    # Jump over already registered maps
                    if map.get_id() in registered_output_maps:
                        continue
                    map.select(dbif=dbif)
                    output_stds.register_map(map=map, dbif=dbif)
                    # Update the registered map list
                    registered_output_maps[map.get_id()] = map.get_id()

    output_stds.update_from_registered_maps(dbif=dbif)

    if output_exists is True:
        output_stds.update_command_string(dbif=dbif)

    # BUGFIX: close the database connection that was previously left open
    dbif.close()
def main():
    """Remove space time datasets from the temporal database; with -rf
    also unregister and physically remove the registered maps.

    Reads module options from the enclosing script's globals.
    """
    # Get the options
    datasets = options["inputs"]
    file = options["file"]
    type = options["type"]
    recursive = flags["r"]
    force = flags["f"]

    if recursive and not force:
        grass.fatal(_("The recursive flag works only in conjunction with the force flag: use -rf"))

    if datasets and file:
        grass.fatal(_("%s= and %s= are mutually exclusive") % ("input", "file"))

    # Make sure the temporal database exists
    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    dataset_list = []

    # Dataset names as comma separated string
    if datasets:
        if datasets.find(",") == -1:
            dataset_list = (datasets,)
        else:
            dataset_list = tuple(datasets.split(","))

    # Read the dataset list from file
    if file:
        # BUGFIX: the file handle was never closed; a context manager
        # guarantees cleanup, and line iteration replaces the manual
        # readline() loop.
        with open(file, "r") as fd:
            for line in fd:
                # Strip the trailing newline, keep the rest verbatim
                dataset_name = line.split("\n")[0]
                dataset_list.append(dataset_name)

    statement = ""

    # Create the pygrass Module object for g.remove
    remove = pyg.Module("g.remove", quiet=True, flags='f', run_=False)

    for name in dataset_list:
        name = name.strip()
        sp = tgis.open_old_stds(name, type, dbif)

        if recursive and force:
            grass.message(_("Removing registered maps and %s" % type))
            maps = sp.get_registered_maps_as_objects(dbif=dbif)
            map_statement = ""
            count = 1
            name_list = []
            for map in maps:
                map.select(dbif)
                # We may have multiple layer for a single map, hence we need
                # to avoid multiple deletation of the same map,
                # but the database entries are still present and must be
                # removed
                if map.get_name() not in name_list:
                    name_list.append(str(map.get_name()))
                map_statement += map.delete(dbif=dbif, execute=False)

                count += 1
                # Delete every 100 maps
                if count % 100 == 0:
                    dbif.execute_transaction(map_statement)
                    if type == "strds":
                        remove(type="raster", name=name_list, run_=True)
                    if type == "stvds":
                        remove(type="vector", name=name_list, run_=True)
                    if type == "str3ds":
                        remove(type="raster_3d", name=name_list, run_=True)
                    map_statement = ""
                    name_list = []

            # Flush whatever is left of the last partial batch
            if map_statement:
                dbif.execute_transaction(map_statement)
            if name_list:
                if type == "strds":
                    remove(type="raster", name=name_list, run_=True)
                if type == "stvds":
                    remove(type="vector", name=name_list, run_=True)
                if type == "str3ds":
                    remove(type="raster_3d", name=name_list, run_=True)
        else:
            grass.message(_("Note: registered maps themselves have not been removed, only the %s" % type))

        statement += sp.delete(dbif=dbif, execute=False)

    # Execute the collected SQL statements
    dbif.execute_transaction(statement)

    dbif.close()
def main():
    """Run r.neighbors on every map of a space time raster dataset and
    register the results in a new space time raster dataset.

    The neighborhood operations run in parallel (``nprocs`` processes).
    Output maps that turn out empty are dropped and removed with
    g.remove unless the ``-n`` flag requests registering null maps.
    """
    # lazy import: only needed for logging the generated commands
    import sys

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    nprocs = options["nprocs"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Fail early when the output dataset exists and overwrite is unset
    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif,
                                 overwrite=overwrite)

    # Configure the r.neighbors module; input/output are set per map below
    neighbor_module = pymod.Module("r.neighbors", input="dummy",
                                   output="dummy", run_=False,
                                   finish_=False, size=int(size),
                                   method=method, overwrite=overwrite,
                                   quiet=True)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors on all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)
        new_map = tgis.open_new_map_dataset(map_name, None, type="raster",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())
        # Log the generated command on stderr, matching the other
        # t.rast.* modules; the previous print() polluted stdout
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1
        if count % 10 == 0:
            grass.percent(count, num_maps, 1)
        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
           map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue
        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps in one g.remove call; join() replaces the
    # original quadratic manual string concatenation
    if len(empty_maps) > 0:
        names = ",".join(map.get_name() for map in empty_maps)
        grass.run_command("g.remove", flags='f', type='raster',
                          name=names, quiet=True)

    dbif.close()
def main(options, flags):
    """Create a space time vector dataset of contours from a space time
    raster dataset by running r.contour on every registered raster map.

    The r.contour jobs run in parallel via a module queue unless
    attribute tables must be created (no ``-t`` flag), in which case a
    single process is enforced.  Empty output maps are dropped and
    removed with g.remove unless ``-n`` requests registering them.
    """
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    nprocs = int(options["nprocs"])
    step = options["step"]
    levels = options["levels"]
    minlevel = options["minlevel"]
    maxlevel = options["maxlevel"]
    cut = options["cut"]
    time_suffix = options["suffix"]
    register_null = flags["n"]
    t_flag = flags["t"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds; fails early on an existing dataset
    # without overwrite
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif,
                                 overwrite=overwrite)

    # Setup the flag string passed to r.contour.
    # NOTE(review): this rebinds the *flags* parameter; the flags dict
    # is no longer accessible below this point
    flags = ""
    if t_flag is True:
        flags += "t"

    # Configure the r.contour module; input/output are set per map below
    contour_module = pymod.Module("r.contour", input="dummy",
                                  output="dummy", run_=False,
                                  finish_=False, flags=flags,
                                  overwrite=overwrite, quiet=True)

    # Forward only the contour parameters the user actually set
    if step:
        contour_module.inputs.step = float(step)
    if minlevel:
        contour_module.inputs.minlevel = float(minlevel)
    if maxlevel:
        contour_module.inputs.maxlevel = float(maxlevel)
    if levels:
        contour_module.inputs.levels = levels.split(",")
    if cut:
        contour_module.inputs.cut = int(cut)

    # The module queue for parallel execution, except if attribute tables
    # should be created.  Then force single process use
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(_("The number of parellel r.contour processes was "
                              "reduced to 1 because of the table attribute "
                              "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.contour on all selected maps
    for map in maps:
        count += 1
        # Build the output name: granularity-based suffix, start-time
        # suffix, or a plain numeric suffix
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(map.temporal_extent.get_start_time(),
                                                      sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)
        # The new vector map inherits the raster map's temporal extent
        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(contour_module)
        mod(input=map.get_id(), output=new_map.get_id())
        # Log the generated command on stderr
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset, copying the temporal
    # type, semantic type, title and description from the input
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title, descr, stype,
                                dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1
        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)
        # Do not register empty maps; also skip maps whose metadata
        # cannot be loaded (r.contour produced no usable output)
        try:
            if map.load() is not True:
                continue
        except FatalError:
            continue
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue
        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps with a single g.remove call
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())
        gscript.run_command("g.remove", flags='f', type='vector',
                            name=names, quiet=True)

    dbif.close()