def rasterize(options, vectorMaps, stampedMaps, dbif, overwrite):
    """
    Create raster maps from all vector maps and return them as a list

    :param options: Named arguments given when calling the module
    :param vectorMaps: Names of vector maps intended to be converted to rasters
    :param stampedMaps: List of vector maps as objects (with timestamps)
    :param dbif: SQL database interface connection
    :param overwrite: boolean saying whether existing maps should be overwritten

    :return rasterMaps: List of newly created raster map datasets
    """
    rasterMaps = list()

    for map, layers in vectorMaps.items():
        for layer in layers:
            for mtimMap in stampedMaps:
                if mtimMap.get_id().split('@')[0] == ':'.join([map, layer]):
                    extent = mtimMap.get_temporal_extent()
                    mapName = '{}_{}'.format(options['basename'],
                                             extent.get_start_time())
                    # Build a valid raster map name from the timestamp
                    for char in (':', '-', ' '):
                        mapName = mapName.replace(char, '_')

                    newMap = tgis.open_new_map_dataset(mapName, None,
                                                       type="raster",
                                                       temporal_extent=extent,
                                                       dbif=dbif,
                                                       overwrite=overwrite)

                    run_command('v.to.rast', input=map, layer=layer,
                                use='attr',
                                attribute_column=options['column'],
                                output=newMap.get_id(), quiet=True,
                                overwrite=overwrite)

                    rasterMaps.append(newMap)

    return rasterMaps
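# A minimal usage sketch for rasterize() (an assumption, not part of the original
# module): the options dict, the vector/layer mapping, and the timestamped map
# objects would normally come from the module's option parsing and from an open
# space time vector dataset. It assumes an initialized GRASS session; all
# dataset, map, and column names below are hypothetical.
def _rasterize_usage_sketch():
    import grass.temporal as tgis

    tgis.init()
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    # Assumed option values for this sketch
    options = {'basename': 'precip', 'column': 'value'}

    # Hypothetical space time vector dataset providing the timestamped maps
    stvds = tgis.open_old_stds('stations_stvds', 'stvds', dbif)
    stamped_maps = stvds.get_registered_maps_as_objects(dbif=dbif)

    # Vector map name -> list of layers to rasterize (assumed layout)
    vector_maps = {'stations_2010': ['1'], 'stations_2011': ['1']}

    raster_maps = rasterize(options, vector_maps, stamped_maps, dbif,
                            overwrite=False)
    dbif.close()
    return raster_maps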
def main():
    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    nprocs = options["nprocs"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif, overwrite=overwrite)

    # Configure the r.neighbor module
    neighbor_module = pymod.Module("r.neighbors", input="dummy", output="dummy",
                                   run_=False, finish_=False, size=int(size),
                                   method=method, overwrite=overwrite, quiet=True)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)

        new_map = tgis.open_new_map_dataset(map_name, None, type="raster",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())
        print(mod.get_bash())
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)

    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1
        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove", flags='f', type='raster',
                          name=names, quiet=True)

    dbif.close()
def main():
    # Lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    use_raster_region = flags["r"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif, overwrite=overwrite)

    # Configure the r.neighbors module
    neighbor_module = pymod.Module("r.neighbors", input="dummy", output="dummy",
                                   run_=False, finish_=False, size=int(size),
                                   method=method, overwrite=overwrite, quiet=True)

    gregion_module = pymod.Module("g.region", raster="dummy",
                                  run_=False, finish_=False)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # Run r.neighbors on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(map_name, None, type="raster",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())

        if use_raster_region is True:
            reg = copy.deepcopy(gregion_module)
            reg(raster=map.get_id())
            print(reg.get_bash())
            print(mod.get_bash())
            mm = pymod.MultiModule([reg, mod], sync=False, set_temp_region=True)
            process_queue.put(mm)
        else:
            print(mod.get_bash())
            process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()
    proc_list = process_queue.get_finished_modules()

    # Check the return status of all finished modules
    error = 0
    for proc in proc_list:
        if proc.popen.returncode != 0:
            grass.error(_("Error running module: %s\n    stderr: %s") %
                        (proc.get_bash(), proc.outputs.stderr))
            error += 1

    if error > 0:
        grass.fatal(_("Error running modules."))

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)

    # Collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1
        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove", flags='f', type='raster',
                          name=names, quiet=True)

    dbif.close()
def main():
    # Lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    register_null = flags["n"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif, overwrite=overwrite)

    # Configure the HANTS module
    hants_flags = ""
    if flags["l"]:
        hants_flags = hants_flags + 'l'
    if flags["h"]:
        hants_flags = hants_flags + 'h'
    if flags["i"]:
        hants_flags = hants_flags + 'i'

    kwargs = dict()
    kwargs['nf'] = options['nf']
    if options['fet']:
        kwargs['fet'] = options['fet']
    kwargs['dod'] = options['dod']
    if options['range']:
        kwargs['range'] = options['range']
    kwargs['suffix'] = "_hants"
    if len(hants_flags) > 0:
        kwargs['flags'] = hants_flags

    count = 0
    num_maps = len(maps)
    new_maps = []

    maplistfile = script.tempfile()
    fd = open(maplistfile, 'w')

    # Create the list of input maps and their time stamps
    for map in maps:
        count += 1
        # r.hants writes its output as <input name>_hants in the current mapset
        map_name = "{ba}_hants".format(ba=map.get_id().split("@")[0])

        new_map = tgis.open_new_map_dataset(map_name, None, type="raster",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        fd.write("{0}\n".format(map.get_id()))

    fd.close()

    # Run r.hants (the output suffix is already set in kwargs)
    grass.run_command('r.hants', file=maplistfile, quiet=True, **kwargs)

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)

    # Collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1
        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove", flags='f', type='raster',
                          name=names, quiet=True)

    dbif.close()
def main(options, flags):
    # Lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    nprocs = int(options["nprocs"])
    step = options["step"]
    levels = options["levels"]
    minlevel = options["minlevel"]
    maxlevel = options["maxlevel"]
    cut = options["cut"]
    time_suffix = options["suffix"]
    register_null = flags["n"]
    t_flag = flags["t"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif, overwrite=overwrite)

    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"

    # Configure the r.contour module
    contour_module = pymod.Module("r.contour", input="dummy", output="dummy",
                                  run_=False, finish_=False, flags=flags,
                                  overwrite=overwrite, quiet=True)
    if step:
        contour_module.inputs.step = float(step)
    if minlevel:
        contour_module.inputs.minlevel = float(minlevel)
    if maxlevel:
        contour_module.inputs.maxlevel = float(maxlevel)
    if levels:
        contour_module.inputs.levels = levels.split(",")
    if cut:
        contour_module.inputs.cut = int(cut)

    # The module queue for parallel execution, except if attribute tables
    # should be created. Then force single process use.
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(_("The number of parallel r.contour processes was "
                              "reduced to 1 because of the attribute table "
                              "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # Run r.contour on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(contour_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title, descr, stype,
                                dbif, overwrite)

    # Collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1
        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        try:
            if map.load() is not True:
                continue
        except FatalError:
            continue
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove", flags='f', type='vector',
                            name=names, quiet=True)

    dbif.close()
def main():
    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif, overwrite=overwrite)

    # Configure the r.neighbors module
    neighbor_module = pymod.Module("r.neighbors", input="dummy", output="dummy",
                                   run_=False, finish_=False, size=int(size),
                                   method=method, overwrite=overwrite, quiet=True)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # Run r.neighbors on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(map_name, None, type="raster",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())
        print(mod.get_bash())
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)

    # Collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1
        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove", flags='f', type='raster',
                          name=names, quiet=True)

    dbif.close()
def main(options, flags):
    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    method = options["type"]
    nprocs = int(options["nprocs"])
    column = options["column"]
    time_suffix = options["suffix"]
    register_null = flags["n"]
    t_flag = flags["t"]
    s_flag = flags["s"]
    v_flag = flags["v"]
    b_flag = flags["b"]
    z_flag = flags["z"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif, overwrite=overwrite)

    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"
    if s_flag is True:
        flags += "s"
    if v_flag is True:
        flags += "v"
    if b_flag is True:
        flags += "b"
    if z_flag is True:
        flags += "z"

    # Configure the r.to.vect module
    to_vector_module = pymod.Module("r.to.vect", input="dummy", output="dummy",
                                    run_=False, finish_=False, flags=flags,
                                    type=method, overwrite=overwrite, quiet=True)

    # The module queue for parallel execution, except if attribute tables
    # should be created. Then force single process use.
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(_("The number of parallel r.to.vect processes was "
                              "reduced to 1 because of the attribute table "
                              "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # Run r.to.vect on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(to_vector_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title, descr, stype,
                                dbif, overwrite)

    # Collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1
        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove", flags='f', type='vector',
                            name=names, quiet=True)

    dbif.close()
def main(options, flags):
    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    method = options["type"]
    nprocs = int(options["nprocs"])
    column = options["column"]
    register_null = flags["n"]
    t_flag = flags["t"]
    s_flag = flags["s"]
    v_flag = flags["v"]
    b_flag = flags["b"]
    z_flag = flags["z"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif, overwrite=overwrite)

    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"
    if s_flag is True:
        flags += "s"
    if v_flag is True:
        flags += "v"
    if b_flag is True:
        flags += "b"
    if z_flag is True:
        flags += "z"

    # Configure the r.to.vect module
    to_vector_module = pymod.Module("r.to.vect", input="dummy", output="dummy",
                                    run_=False, finish_=False, flags=flags,
                                    type=method, overwrite=overwrite, quiet=True)

    # The module queue for parallel execution, except if attribute tables
    # should be created. Then force single process use.
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(_("The number of parallel r.to.vect processes was "
                              "reduced to 1 because of the attribute table "
                              "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # Run r.to.vect on all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)

        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(to_vector_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title, descr, stype,
                                dbif, overwrite)

    # Collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1
        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove", flags='f', type='vector',
                            name=names, quiet=True)

    dbif.close()
def main(options, flags):
    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    nprocs = int(options["nprocs"])
    step = options["step"]
    levels = options["levels"]
    minlevel = options["minlevel"]
    maxlevel = options["maxlevel"]
    cut = options["cut"]
    register_null = flags["n"]
    t_flag = flags["t"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif, overwrite=overwrite)

    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"

    # Configure the r.contour module
    contour_module = pymod.Module("r.contour", input="dummy", output="dummy",
                                  run_=False, finish_=False, flags=flags,
                                  overwrite=overwrite, quiet=True)
    if step:
        contour_module.inputs.step = float(step)
    if minlevel:
        contour_module.inputs.minlevel = float(minlevel)
    if maxlevel:
        contour_module.inputs.maxlevel = float(maxlevel)
    if levels:
        contour_module.inputs.levels = levels.split(",")
    if cut:
        contour_module.inputs.cut = int(cut)

    # The module queue for parallel execution, except if attribute tables
    # should be created. Then force single process use.
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(_("The number of parallel r.contour processes was "
                              "reduced to 1 because of the attribute table "
                              "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # Run r.contour on all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)

        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(contour_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title, descr, stype,
                                dbif, overwrite)

    # Collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1
        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        try:
            if map.load() is not True:
                continue
        except FatalError:
            continue
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove", flags='f', type='vector',
                            name=names, quiet=True)

    dbif.close()
def main():
    # Lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    stdstype = options["type"]
    copy_maps = flags["c"]

    maptype = "raster"
    element = "cell"
    if stdstype == "str3ds":
        maptype = "raster_3d"
        element = "g3dcell"
    elif stdstype == "stvds":
        maptype = "vector"
        element = "vector"

    # Make sure the temporal database exists
    tgis.init()

    # Get the current mapset to create the id of the space time dataset
    mapset = gscript.gisenv()["MAPSET"]

    inname = input
    inmapset = mapset
    if "@" in input:
        inname, inmapset = input.split("@")

    outname = output
    outmapset = mapset
    if "@" in output:
        outname, outmapset = output.split("@")

    if outmapset != mapset:
        gscript.fatal(
            _("The output dataset <%s> must be in the current mapset <%s>.")
            % (output, mapset)
        )

    msgr = tgis.get_tgis_message_interface()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    old_sp = tgis.open_old_stds(input, stdstype, dbif)
    old_maps = old_sp.get_registered_maps_as_objects(dbif=dbif)

    if not old_maps:
        dbif.close()
        gscript.warning(
            _("Empty space-time %s dataset <%s>, nothing to copy")
            % (maptype, input)
        )
        return

    overwrite = gscript.overwrite()

    # Check the new stds
    new_sp = tgis.check_new_stds(output, stdstype, dbif, overwrite)

    new_maps = None
    if copy_maps:
        gscript.message(_("Copying %s maps to the current mapset...") % maptype)
        new_maps = []
        num_maps = len(old_maps)
        count = 0

        for map in old_maps:
            count += 1
            map_id = map.get_id()
            map_name = map_id
            map_mapset = mapset

            if "@" in map_id:
                map_name, map_mapset = map_id.split("@")

            if map_mapset != mapset:
                found = gscript.find_file(name=map_name, element=element,
                                          mapset=mapset)
                if found["name"] is not None and len(found["name"]) > 0:
                    gscript.fatal(
                        _("A %s map <%s> exists already in the current mapset <%s>.")
                        % (maptype, map_name, mapset)
                    )
                kwargs = {maptype: "%s,%s" % (map_id, map_name)}
                gscript.run_command("g.copy", **kwargs)
            else:
                # The map is already in the current mapset
                gscript.message(
                    _("The %s map <%s> is already in the current mapset, not copying")
                    % (maptype, map_name)
                )

            if count % 10 == 0:
                msgr.percent(count, num_maps, 1)

            # We need to build the id
            if maptype != "vector":
                map_id = tgis.AbstractMapDataset.build_id(map_name, mapset)
            else:
                map_id = tgis.AbstractMapDataset.build_id(map_name, mapset,
                                                          map.get_layer())

            new_map = tgis.open_new_map_dataset(
                map_name, None, type="raster",
                temporal_extent=map.get_temporal_extent(),
                overwrite=overwrite, dbif=dbif,
            )

            # Semantic label
            semantic_label = map.metadata.get_semantic_label()
            if semantic_label is not None and semantic_label != "None":
                new_map.metadata.set_semantic_label(semantic_label)

            new_maps.append(new_map)
    else:
        # Don't copy maps, use the old maps
        new_maps = old_maps

    temporal_type, semantic_type, title, description = old_sp.get_initial_values()
    new_sp = tgis.open_new_stds(
        output, stdstype, old_sp.get_temporal_type(), title, description,
        semantic_type, dbif, gscript.overwrite(),
    )

    # Register the maps in the database
    num_maps = len(new_maps)
    count = 0
    for map in new_maps:
        count += 1

        # Insert map in temporal database
        if not map.is_in_db(dbif, mapset):
            semantic_label = map.metadata.get_semantic_label()
            map.load()
            # Semantic labels are not yet properly implemented in TGIS
            if semantic_label is not None and semantic_label != "None":
                map.metadata.set_semantic_label(semantic_label)
            map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    dbif.close()