def main():
    """Modify null values of all raster maps in a space time raster dataset
    by running r.null on every registered map in parallel.

    Reads the global ``options`` dictionary:
        input:   name of the space time raster dataset (strds)
        where:   optional WHERE clause selecting registered maps
        nprocs:  number of r.null processes to run in parallel
        null:    value that existing null cells are set to
        setnull: value(s) that are converted to null
    Exactly one of 'null'/'setnull' must be given, otherwise the module
    aborts with a fatal error.
    """
    strds = options["input"]
    where = options["where"]
    nprocs = int(options["nprocs"])

    # Template r.null call; one deep copy per map is queued below
    nullmod = pymod.Module("r.null")
    nullmod.flags.quiet = True

    if options["null"]:
        nullmod.inputs.null = options["null"]
    elif options["setnull"]:
        nullmod.inputs.setnull = options["setnull"]
    else:
        gscript.fatal(_("Please set 'null' or 'setnull' option"))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where, "start_time", None)

    if maps is None:
        dbif.close()
        # Fix: translate first, then format. The old code formatted inside
        # _(), so the message could never be found in the catalog. The old
        # "return 1" after gscript.fatal() was unreachable and is dropped.
        gscript.fatal(
            _("Space time raster dataset {st} seems to be empty").format(
                st=strds))

    # module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)

    for mapp in maps:
        count += 1
        mod = copy.deepcopy(nullmod)
        mod.inputs.map = mapp.get_id()
        process_queue.put(mod)
        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()
    # Fix: close the database connection (was leaked before)
    dbif.close()
def main():
    """Apply r.neighbors with a given window size and method to every map
    of a space time raster dataset and register the results in a new
    space time raster dataset.

    Reads the global ``options``/``flags`` dictionaries (input, output,
    where, size, basename, method, nprocs and the -n flag). Output maps
    that are completely null are removed afterwards unless -n requests
    their registration.
    """
    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    nprocs = options["nprocs"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Fails early if the output dataset exists and overwrite is not set
    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbor module; run_=False/finish_=False make it a
    # template that is deep-copied for each map below
    neighbor_module = pymod.Module("r.neighbors", input="dummy",
                                   output="dummy", run_=False, finish_=False,
                                   size=int(size), method=method,
                                   overwrite=overwrite, quiet=True)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)
        new_map = tgis.open_new_map_dataset(
            map_name, None, type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())
        # NOTE(review): prints every queued command to stdout -- looks like
        # leftover debugging output; confirm whether this is intended
        print(mod.get_bash())
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr,
                                stype, dbif, overwrite)
    num_maps = len(new_maps)

    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps (min and max unset after load)
        map.load()
        if map.metadata.get_min() is None and \
           map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps with a single g.remove call
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())
        grass.run_command("g.remove", flags='f', type='raster',
                          name=names, quiet=True)

    dbif.close()
def main(options, flags):
    """Sample a space time raster dataset (strds) at point locations.

    The sampling coordinates come from a vector points map ('points'),
    the 'coordinates' option, or stdin (-i flag). The selected raster
    maps are sampled with r.what in parallel batches (r.what accepts at
    most 400 maps per call) and the partial result files are merged into
    the output in row, column or timerow layout.
    """
    # Get the options
    points = options["points"]
    coordinates = options["coordinates"]
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    order = options["order"]
    layout = options["layout"]
    # NOTE(review): null_value is read but not used in this function --
    # presumably consumed by the one_point_per_* writers; confirm
    null_value = options["null_value"]
    separator = options["separator"]
    nprocs = int(options["nprocs"])
    write_header = flags["n"]
    use_stdin = flags["i"]
    #output_cat_label = flags["f"]
    #output_color = flags["r"]
    #output_cat = flags["i"]

    overwrite = gscript.overwrite()

    if coordinates and points:
        gscript.fatal(_("Options coordinates and points are mutually exclusive"))

    if not coordinates and not points and not use_stdin:
        gscript.fatal(_("Please specify the coordinates, the points option or use the 's' option to pipe coordinate positions to t.rast.what from stdin, to provide the sampling coordinates"))

    # Fix: site_input must have a default; it is used by the output writers
    # below even when coordinates do not come from stdin (was a NameError)
    site_input = False
    if use_stdin:
        coordinates_stdin = str(sys.__stdin__.read())
        # Check if coordinates are given with site names or IDs:
        # three or more columns mean x, y plus a site name/id column
        stdin_length = len(coordinates_stdin.split('\n')[0].split())
        site_input = stdin_length >= 3

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, order=order,
                                             dbif=dbif)
    dbif.close()

    if not maps:
        gscript.fatal(_("Space time raster dataset <%s> is empty") % sp.get_id())

    # Setup separator: map the symbolic names to the literal characters
    if separator == "pipe":
        separator = "|"
    if separator == "comma":
        separator = ","
    if separator == "space":
        separator = " "
    if separator == "tab":
        separator = "\t"
    if separator == "newline":
        separator = "\n"

    # Setup flags are disabled due to test issues
    flags = ""
    #if output_cat_label is True:
    #    flags += "f"
    #if output_color is True:
    #    flags += "r"
    #if output_cat is True:
    #    flags += "i"

    # Configure the r.what module template for the chosen coordinate source
    if points:
        r_what = pymod.Module("r.what", map="dummy", output="dummy",
                              run_=False, separator=separator,
                              points=points, overwrite=overwrite,
                              flags=flags, quiet=True)
    elif coordinates:
        # Create a list of values
        coord_list = coordinates.split(",")
        r_what = pymod.Module("r.what", map="dummy", output="dummy",
                              run_=False, separator=separator,
                              coordinates=coord_list, overwrite=overwrite,
                              flags=flags, quiet=True)
    elif use_stdin:
        r_what = pymod.Module("r.what", map="dummy", output="dummy",
                              run_=False, separator=separator,
                              stdin_=coordinates_stdin, overwrite=overwrite,
                              flags=flags, quiet=True)
    else:
        # Unreachable because of the checks above; abort hard to avoid a
        # NameError on r_what further down (was grass.error before)
        gscript.fatal(_("Please specify points or coordinates"))

    if len(maps) < nprocs:
        nprocs = len(maps)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))
    num_maps = len(maps)

    # 400 Maps is the absolute maximum in r.what.
    # We need to determine the number of maps that can be processed in
    # parallel. First estimate the number of maps per process; we use 400
    # maps simultaneously as maximum for a single process.
    num_loops = int(num_maps / (400 * nprocs))
    remaining_maps = num_maps % (400 * nprocs)

    if num_loops == 0:
        num_loops = 1
        remaining_maps = 0

    # Compute the number of maps for each process
    maps_per_loop = int((num_maps - remaining_maps) / num_loops)
    maps_per_process = int(maps_per_loop / nprocs)
    remaining_maps_per_loop = maps_per_loop % nprocs

    # We put the output files in an ordered list
    output_files = []
    output_time_list = []

    count = 0
    for loop in range(num_loops):
        file_name = gscript.tempfile() + "_%i" % (loop)
        count = process_loop(nprocs, maps, file_name, count,
                             maps_per_process, remaining_maps_per_loop,
                             output_files, output_time_list, r_what,
                             process_queue)

    process_queue.wait()

    gscript.verbose("Number of raster map layers remaining for sampling %i" % (remaining_maps))
    if remaining_maps > 0:
        # Use a single process if less then 100 maps
        if remaining_maps <= 100:
            # Fix: the old code referenced an undefined 'map_names' here and
            # never registered the result file, so the remainder was lost.
            # Collect the leftover maps and register file and time list so
            # the merge step below picks them up.
            remaining = maps[count:]
            map_names = [m.get_id() for m in remaining]
            file_name = gscript.tempfile() + "_remaining"
            output_files.append(file_name)
            output_time_list.append(remaining)
            mod = copy.deepcopy(r_what)
            mod(map=map_names, output=file_name)
            process_queue.put(mod)
        else:
            maps_per_process = int(remaining_maps / nprocs)
            remaining_maps_per_loop = remaining_maps % nprocs
            file_name = "out_remain"
            process_loop(nprocs, maps, file_name, count, maps_per_process,
                         remaining_maps_per_loop, output_files,
                         output_time_list, r_what, process_queue)

    # Wait for unfinished processes
    process_queue.wait()

    # Join the output files in the correct order
    if layout == "row":
        one_point_per_row_output(separator, output_files, output_time_list,
                                 output, write_header, site_input)
    elif layout == "col":
        one_point_per_col_output(separator, output_files, output_time_list,
                                 output, write_header, site_input)
    else:
        one_point_per_timerow_output(separator, output_files,
                                     output_time_list, output, write_header,
                                     site_input)
def main():
    """Apply r.neighbors to every map of a space time raster dataset and
    register the filtered results in a new space time raster dataset.

    Reads the global ``options``/``flags`` dictionaries. With the -r flag
    each r.neighbors run is wrapped in a MultiModule together with a
    g.region call so it executes in a temporary region matching its input
    map. Output maps that are completely null are removed afterwards
    unless the -n flag requests their registration.
    """
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    use_raster_region = flags["r"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Fails early if the output dataset exists and overwrite is not set
    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbor module template (deep-copied per map)
    neighbor_module = pymod.Module("r.neighbors", input="dummy",
                                   output="dummy", run_=False, finish_=False,
                                   size=int(size), method=method,
                                   overwrite=overwrite, quiet=True)

    # g.region template, used with -r to match the region to each input map
    gregion_module = pymod.Module(
        "g.region",
        raster="dummy",
        run_=False,
        finish_=False,
    )

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors all selected maps
    for map in maps:
        count += 1

        # Derive the output map name from the configured suffix scheme
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(
            map_name, None, type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())

        if use_raster_region is True:
            reg = copy.deepcopy(gregion_module)
            reg(raster=map.get_id())
            print(reg.get_bash())
            print(mod.get_bash())
            # Run g.region + r.neighbors as one unit in a temporary region
            mm = pymod.MultiModule([reg, mod], sync=False,
                                   set_temp_region=True)
            process_queue.put(mm)
        else:
            print(mod.get_bash())
            process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()
    proc_list = process_queue.get_finished_modules()

    # Check return status of all finished modules
    error = 0
    for proc in proc_list:
        if proc.popen.returncode != 0:
            # Fix: the first placeholder was a bare '%', which raised
            # "unsupported format character" as soon as an error occurred
            grass.error(
                _("Error running module: %s\n stderr: %s") %
                (proc.get_bash(), proc.outputs.stderr))
            error += 1

    if error > 0:
        grass.fatal(_("Error running modules."))

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr,
                                stype, dbif, overwrite)
    num_maps = len(new_maps)

    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps (min and max unset after load)
        map.load()
        if map.metadata.get_min() is None and \
           map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps with a single g.remove call
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())
        grass.run_command("g.remove", flags='f', type='raster',
                          name=names, quiet=True)

    dbif.close()
def aggregate_by_topology(
    granularity_list,
    granularity,
    map_list,
    topo_list,
    basename,
    time_suffix,
    offset=0,
    method="average",
    nprocs=1,
    spatial=None,
    dbif=None,
    overwrite=False,
    file_limit=1000,
):
    """Aggregate a list of raster input maps with r.series

    :param granularity_list: A list of AbstractMapDataset objects.
                             The temporal extents of the objects are used
                             to build the spatio-temporal topology with the
                             map list objects
    :param granularity: The granularity of the granularity list
    :param map_list: A list of RasterDataset objects that contain the raster
                     maps that should be aggregated
    :param topo_list: A list of strings of topological relations that are
                      used to select the raster maps for aggregation
    :param basename: The basename of the new generated raster maps
    :param time_suffix: Use the granularity truncated start time of the
                        actual granule to create the suffix for the basename
    :param offset: Use a numerical offset for suffix generation
                   (overwritten by time_suffix)
    :param method: The aggregation method of r.series (average,min,max, ...)
    :param nprocs: The number of processes used for parallel computation
    :param spatial: This indicates if the spatial topology is created as
                    well: spatial can be None (no spatial topology), "2D"
                    using west, east, south, north or "3D" using west,
                    east, south, north, bottom, top
    :param dbif: The database interface to be used
    :param overwrite: Overwrite existing raster maps
    :param file_limit: The maximum number of raster map layers that
                       should be opened at once by r.series
    :return: A list of RasterDataset objects that contain the new map names
             and the temporal extent for map registration
    """
    import grass.pygrass.modules as pymod
    import copy

    msgr = get_tgis_message_interface()

    dbif, connection_state_changed = init_dbif(dbif)

    topo_builder = SpatioTemporalTopologyBuilder()
    topo_builder.build(mapsA=granularity_list, mapsB=map_list,
                       spatial=spatial)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    # Dummy process object that will be deep copied
    # and be put into the process queue
    r_series = pymod.Module(
        "r.series",
        output="spam",
        method=[method],
        overwrite=overwrite,
        quiet=True,
        run_=False,
        finish_=False,
    )
    g_copy = pymod.Module(
        "g.copy", raster=["spam", "spamspam"], quiet=True, run_=False,
        finish_=False
    )
    output_list = []
    count = 0

    for granule in granularity_list:
        msgr.percent(count, len(granularity_list), 1)
        count += 1

        # Collect the names of all maps related to this granule by any of
        # the requested topological relations. The relation order is fixed
        # so the resulting aggregation list stays deterministic.
        aggregation_list = []
        for relation in (
            "equal",
            "contains",
            "during",
            "starts",
            "started",
            "finishes",
            "finished",
            "overlaps",
            "overlapped",
        ):
            if relation in topo_list:
                related_maps = getattr(granule, relation)
                if related_maps:
                    for map_layer in related_maps:
                        aggregation_list.append(map_layer.get_name())

        if aggregation_list:
            msgr.verbose(
                _("Aggregating %(len)i raster maps from %(start)s to"
                  " %(end)s")
                % (
                    {
                        "len": len(aggregation_list),
                        "start": str(granule.temporal_extent.get_start_time()),
                        "end": str(granule.temporal_extent.get_end_time()),
                    }
                )
            )

            # Derive the output map name from the configured suffix scheme
            if granule.is_time_absolute() is True and time_suffix == "gran":
                suffix = create_suffix_from_datetime(
                    granule.temporal_extent.get_start_time(), granularity
                )
                output_name = "{ba}_{su}".format(ba=basename, su=suffix)
            elif granule.is_time_absolute() is True and time_suffix == "time":
                suffix = create_time_suffix(granule)
                output_name = "{ba}_{su}".format(ba=basename, su=suffix)
            else:
                output_name = create_numeric_suffix(
                    basename, count + int(offset), time_suffix
                )

            map_layer = RasterDataset(
                "%s@%s" % (output_name, get_current_mapset()))
            map_layer.set_temporal_extent(granule.get_temporal_extent())

            if map_layer.map_exists() is True and overwrite is False:
                # Fix: format AFTER translation; the old code formatted
                # inside _() so the message never matched the catalog
                msgr.fatal(
                    _(
                        "Unable to perform aggregation. Output raster "
                        "map <%(name)s> exists and overwrite flag was "
                        "not set"
                    )
                    % ({"name": output_name})
                )

            output_list.append(map_layer)

            if len(aggregation_list) > 1:
                # Create the r.series input file
                filename = gscript.tempfile(True)
                # Use a context manager so the handle is always closed and
                # the builtin name 'file' is not shadowed
                with open(filename, "w") as map_list_file:
                    for name in aggregation_list:
                        map_list_file.write("%s\n" % (name))

                mod = copy.deepcopy(r_series)
                mod(file=filename, output=output_name)
                if len(aggregation_list) > int(file_limit):
                    # Fix: format AFTER translation (same catalog issue)
                    msgr.warning(
                        _(
                            "The limit of open files (%i) was "
                            "reached (%i). The module r.series will "
                            "be run with flag z, to avoid open "
                            "files limit exceeding."
                        )
                        % (int(file_limit), len(aggregation_list))
                    )
                    mod(flags="z")
                process_queue.put(mod)
            else:
                # A single input map is simply copied to the output name
                mod = copy.deepcopy(g_copy)
                mod(raster=[aggregation_list[0], output_name])
                process_queue.put(mod)

    process_queue.wait()

    if connection_state_changed:
        dbif.close()

    msgr.percent(1, 1, 1)

    return output_list
def main(options, flags):
    """Produce contour vector maps from every raster map of a space time
    raster dataset with r.contour and register them in a new space time
    vector dataset.

    Empty vector maps (no primitives) are removed afterwards unless the
    -n flag requests their registration. Without the -t flag (attribute
    table creation enabled) processing is forced to a single process.
    """
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    nprocs = int(options["nprocs"])
    step = options["step"]
    levels = options["levels"]
    minlevel = options["minlevel"]
    maxlevel = options["maxlevel"]
    cut = options["cut"]
    time_suffix = options["suffix"]

    register_null = flags["n"]
    t_flag = flags["t"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif,
                                 overwrite=overwrite)

    # Setup the flags -- note: this rebinds the 'flags' parameter to the
    # r.contour flag string; the boolean flags were extracted above
    flags = ""
    if t_flag is True:
        flags += "t"

    # Configure the r.contour module template (deep-copied per map)
    contour_module = pymod.Module("r.contour", input="dummy",
                                  output="dummy", run_=False, finish_=False,
                                  flags=flags, overwrite=overwrite,
                                  quiet=True)

    # Only forward the contour parameters that were actually given
    if step:
        contour_module.inputs.step = float(step)
    if minlevel:
        contour_module.inputs.minlevel = float(minlevel)
    if maxlevel:
        contour_module.inputs.maxlevel = float(maxlevel)
    if levels:
        contour_module.inputs.levels = levels.split(",")
    if cut:
        contour_module.inputs.cut = int(cut)

    # The module queue for parallel execution, except if attribute tables
    # should be created. Then force single process use
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(
                _("The number of parellel r.contour processes was "
                  "reduced to 1 because of the table attribute "
                  "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.contour on all selected maps
    for map in maps:
        count += 1

        # Derive the output map name from the configured suffix scheme
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(
            map_name, None, type="vector",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(contour_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title, descr,
                                stype, dbif, overwrite)

    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps; maps that fail to load (e.g. not
        # created because no contours fell in the level range) are skipped
        try:
            if map.load() is not True:
                continue
        except FatalError:
            continue
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps with a single g.remove call
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())
        gscript.run_command("g.remove", flags='f', type='vector',
                            name=names, quiet=True)

    dbif.close()
def main():
    """Fill temporal gaps in a space time raster dataset by interpolating
    between the raster maps that precede and follow each gap with
    r.series.interp, running the interpolations in parallel.

    Reads the global ``options`` dictionary (input strds, basename,
    where, nprocs, suffix). The interpolated maps are registered in the
    input dataset; existing maps are only replaced when --overwrite is
    set.
    """
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    base = options["basename"]
    where = options["where"]
    nprocs = options["nprocs"]
    tsuffix = options["suffix"]

    mapset = grass.encode(grass.gisenv()["MAPSET"])

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds")

    # Gaps show up as map objects with id None
    maps = sp.get_registered_maps_as_objects_with_gaps(where, dbif)

    num = len(maps)

    # Configure the r.series.interp module template (deep-copied per gap)
    gapfill_module = pymod.Module(
        "r.series.interp",
        overwrite=grass.overwrite(),
        quiet=True,
        run_=False,
        finish_=False,
    )

    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    gap_list = []
    overwrite_flags = {}

    # Identify all gaps and create new names
    count = 0
    for _map in maps:
        if _map.get_id() is None:
            count += 1
            if sp.get_temporal_type() == 'absolute' and tsuffix in [
                    'gran', 'time']:
                # The time-based suffix is computed per interpolated map
                # in the loop below
                _id = "{ba}@{ma}".format(ba=base, ma=mapset)
            else:
                map_name = tgis.create_numeric_suffix(base, num + count,
                                                      tsuffix)
                _id = "{name}@{ma}".format(name=map_name, ma=mapset)
            _map.set_id(_id)

            gap_list.append(_map)

    if len(gap_list) == 0:
        grass.message(_("No gaps found"))
        return

    # Build the temporal topology
    tb = tgis.SpatioTemporalTopologyBuilder()
    tb.build(maps)

    # Do some checks before computation
    for _map in gap_list:
        if not _map.get_precedes() or not _map.get_follows():
            grass.fatal(_("Unable to determine successor "
                          "and predecessor of a gap."))

        if len(_map.get_precedes()) > 1:
            grass.warning(_("More than one successor of the gap found. "
                            "Using the first found."))

        if len(_map.get_follows()) > 1:
            grass.warning(_("More than one predecessor of the gap found. "
                            "Using the first found."))

    # Interpolate the maps using parallel processing
    result_list = []

    for _map in gap_list:
        predecessor = _map.get_follows()[0]
        successor = _map.get_precedes()[0]

        gran = sp.get_granularity()
        tmpval, start = predecessor.get_temporal_extent_as_tuple()
        end, tmpval = successor.get_temporal_extent_as_tuple()

        # Now resample the gap
        map_matrix = tgis.AbstractSpaceTimeDataset.resample_maplist_by_granularity(
            (_map, ), start, end, gran)

        map_names = []
        map_positions = []

        # Sampling positions are spread evenly over the gap interval
        increment = 1.0 / (len(map_matrix) + 1.0)
        position = increment
        count = 0
        for intp_list in map_matrix:
            new_map = intp_list[0]
            count += 1
            if sp.get_temporal_type() == 'absolute' and tsuffix == 'gran':
                suffix = tgis.create_suffix_from_datetime(
                    new_map.temporal_extent.get_start_time(),
                    sp.get_granularity())
                new_id = "{ba}_{su}@{ma}".format(ba=new_map.get_name(),
                                                 su=suffix, ma=mapset)
            elif sp.get_temporal_type() == 'absolute' and tsuffix == 'time':
                suffix = tgis.create_time_suffix(new_map)
                new_id = "{ba}_{su}@{ma}".format(ba=new_map.get_name(),
                                                 su=suffix, ma=mapset)
            else:
                map_name = tgis.create_numeric_suffix(new_map.get_name(),
                                                      count, tsuffix)
                new_id = "{name}@{ma}".format(name=map_name, ma=mapset)

            new_map.set_id(new_id)

            overwrite_flags[new_id] = False
            if new_map.map_exists() or new_map.is_in_db(dbif):
                if not grass.overwrite():
                    # Fix: report the actually conflicting id (new_id, not
                    # the stale _id from the gap loop) and format AFTER
                    # translation so the catalog lookup works
                    grass.fatal(_("Map with name <%s> already exists. "
                                  "Please use another base name.") % new_id)
                else:
                    if new_map.is_in_db(dbif):
                        overwrite_flags[new_id] = True

            map_names.append(new_map.get_name())
            map_positions.append(position)
            position += increment

            result_list.append(new_map)

        mod = copy.deepcopy(gapfill_module)
        mod(input=(predecessor.get_map_id(), successor.get_map_id()),
            datapos=(0, 1), output=map_names, samplingpos=map_positions)
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Insert new interpolated maps in temporal database and dataset
    for _map in result_list:
        map_id = _map.get_id()
        if overwrite_flags[map_id] is True:
            # The map replaces an existing DB entry: delete the old entry
            # and register a fresh map instance with the original extent
            if _map.is_time_absolute():
                start, end = _map.get_absolute_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(map_id)
                _map.set_absolute_time(start, end)
            else:
                start, end, unit = _map.get_relative_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(map_id)
                _map.set_relative_time(start, end, unit)

        _map.load()
        _map.insert(dbif)
        sp.register_map(_map, dbif)

    sp.update_from_registered_maps(dbif)
    sp.update_command_string(dbif=dbif)
    dbif.close()
def main(options, flags):
    """Run r.to.vect over every map of a space time raster dataset and
    register the vectorized results in a new space time vector dataset.

    Vector maps without primitives are removed afterwards unless the -n
    flag requests their registration. Without the -t flag (attribute
    tables are created) processing falls back to a single process.
    """
    # Read the parsed options and flags
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    method = options["type"]
    nprocs = int(options["nprocs"])
    column = options["column"]
    register_null = flags["n"]
    t_flag = flags["t"]
    s_flag = flags["s"]
    v_flag = flags["v"]
    b_flag = flags["b"]
    z_flag = flags["z"]

    # Initialize the temporal framework and open a database connection
    tgis.init()
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Verify that the target stvds can be created
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif,
                                 overwrite=overwrite)

    # Assemble the r.to.vect flag string from the boolean module flags
    flags = ""
    for letter, enabled in (("t", t_flag), ("s", s_flag), ("v", v_flag),
                            ("b", b_flag), ("z", z_flag)):
        if enabled is True:
            flags += letter

    # Template r.to.vect call; one deep copy per raster map is queued
    to_vector_module = pymod.Module("r.to.vect", input="dummy",
                                    output="dummy", run_=False,
                                    finish_=False, flags=flags, type=method,
                                    overwrite=overwrite, quiet=True)

    # Attribute table creation is not parallel-safe: force a single
    # process unless -t (no tables) was given
    if t_flag is False and nprocs > 1:
        nprocs = 1
        gscript.warning(_("The number of parellel r.to.vect processes was "
                          "reduced to 1 because of the table attribute "
                          "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    num_maps = len(maps)
    new_maps = []

    # Queue one r.to.vect run per input raster map
    for position, raster_map in enumerate(maps, start=1):
        vector_name = "%s_%i" % (base, position)
        vector_map = tgis.open_new_map_dataset(
            vector_name, None, type="vector",
            temporal_extent=raster_map.get_temporal_extent(),
            overwrite=overwrite, dbif=dbif)
        new_maps.append(vector_map)

        run = copy.deepcopy(to_vector_module)
        run(input=raster_map.get_id(), output=vector_map.get_id())
        sys.stderr.write(run.get_bash() + "\n")
        process_queue.put(run)

        if position % 10 == 0:
            gscript.percent(position, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Create the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title, descr,
                                stype, dbif, overwrite)

    num_maps = len(new_maps)
    # Vector maps without primitives are collected here for later removal
    empty_maps = []

    # Register the generated maps in the temporal database
    for position, vector_map in enumerate(new_maps, start=1):
        if position % 10 == 0:
            gscript.percent(position, num_maps, 1)

        # Skip (and remember) empty maps unless -n was given
        vector_map.load()
        if vector_map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(vector_map)
                continue

        vector_map.insert(dbif)
        new_sp.register_map(vector_map, dbif)

    # Refresh the spatio-temporal extent and metadata of the new dataset
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Drop the empty vector maps with one g.remove call
    if len(empty_maps) > 0:
        names = ",".join(vmap.get_name() for vmap in empty_maps)
        gscript.run_command("g.remove", flags='f', type='vector',
                            name=names, quiet=True)

    dbif.close()