Example #1
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    nprocs = options["nprocs"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbors module
    neighbor_module = pymod.Module("r.neighbors",
                                   input="dummy",
                                   output="dummy",
                                   run_=False,
                                   finish_=False,
                                   size=int(size),
                                   method=method,
                                   overwrite=overwrite,
                                   quiet=True)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors on all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)
        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())
        print(mod.get_bash())
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
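
The main() functions in these examples rely on module-level names that the excerpts omit: the options and flags dictionaries plus the grass, tgis, pymod and copy modules. The following scaffolding is a minimal sketch of the assumed entry point, not part of the original scripts; in a real GRASS module the option and flag definitions live in the "# %option" / "# %flag" header comments that grass.script.parser() reads.

# Minimal sketch of the assumed script scaffolding (not from the original
# excerpt): module-level imports plus the standard GRASS parser that fills
# the global `options` and `flags` dictionaries used inside main().
import copy

import grass.script as grass
import grass.temporal as tgis
import grass.pygrass.modules as pymod


if __name__ == "__main__":
    # parser() reads the "# %option" / "# %flag" header comments of the
    # script and returns the parsed values as two dictionaries.
    options, flags = grass.parser()
    main()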
Example #2
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    sampler = options["sample"]
    where = options["where"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]
    type = options["type"]

    topo_list = sampling.split(",")

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds", dbif)
    sampler_sp = tgis.open_old_stds(sampler, type, dbif)

    if sampler_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        gcore.fatal(
            _("Input and aggregation dataset must have "
              "the same temporal type"))

    # Check if intervals are present
    if sampler_sp.temporal_extent.get_map_time() != "interval":
        dbif.close()
        gcore.fatal(
            _("All registered maps of the aggregation dataset "
              "must have time intervals"))

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    map_list = sp.get_registered_maps_as_objects(where=where,
                                                 order="start_time",
                                                 dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)

    granularity_list = sampler_sp.get_registered_maps_as_objects(
        where=where, order="start_time", dbif=dbif)

    if not granularity_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % sampler)

    gran = sampler_sp.get_granularity()

    output_list = tgis.aggregate_by_topology(
        granularity_list=granularity_list,
        granularity=gran,
        map_list=map_list,
        topo_list=topo_list,
        basename=base,
        time_suffix=time_suffix,
        offset=offset,
        method=method,
        nprocs=nprocs,
        spatial=None,
        overwrite=gcore.overwrite(),
    )

    if output_list:
        temporal_type, semantic_type, title, description = sp.get_initial_values(
        )
        output_strds = tgis.open_new_stds(
            output,
            "strds",
            temporal_type,
            title,
            description,
            semantic_type,
            dbif,
            gcore.overwrite(),
        )
        tgis.register_map_object_list(
            "rast",
            output_list,
            output_strds,
            register_null,
            sp.get_relative_time_unit(),
            dbif,
        )

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
Example #3
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    gran = options["granularity"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    time_suffix = flags["s"]
    
    topo_list = sampling.split(",")

    tgis.init()
    
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds", dbif)

    map_list = sp.get_registered_maps_as_objects(where=where, order="start_time", dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())
    
    start_time = map_list[0].temporal_extent.get_start_time()

    if sp.is_time_absolute():
        start_time = tgis.adjust_datetime_to_granularity(start_time, gran)

    # We use the end time first
    end_time = map_list[-1].temporal_extent.get_end_time()
    has_end_time = True

    # In case no end time is available, then we use the start time of the last map layer
    if end_time is None:
        end_time = map_list[-1].temporal_extent.get_start_time()
        has_end_time = False

    granularity_list = []

    # Build the granularity list
    while True:
        if has_end_time is True:
            if start_time >= end_time:
                break
        else:
            if start_time > end_time:
                break

        granule = tgis.RasterDataset(None)
        start = start_time
        if sp.is_time_absolute():
            end = tgis.increment_datetime_by_string(start_time, gran)
            granule.set_absolute_time(start, end)
        else:
            end = start_time + int(gran)
            granule.set_relative_time(start, end, sp.get_relative_time_unit())
        start_time = end
        
        granularity_list.append(granule)

    output_list = tgis.aggregate_by_topology(
        granularity_list=granularity_list,
        granularity=gran,
        map_list=map_list,
        topo_list=topo_list,
        basename=base,
        time_suffix=time_suffix,
        offset=offset,
        method=method,
        nprocs=nprocs,
        spatial=None,
        overwrite=gcore.overwrite(),
    )

    if output_list:
        temporal_type, semantic_type, title, description = sp.get_initial_values()
        output_strds = tgis.open_new_stds(output, "strds", temporal_type,
                                          title, description, semantic_type,
                                          dbif, gcore.overwrite())
        tgis.register_map_object_list("rast", output_list, output_strds,
                                      register_null,
                                      sp.get_relative_time_unit(), dbif)

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
Example #4
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    sampler = options["sample"]
    where = options["where"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    time_suffix = flags["s"]
    type = options["type"]
    
    topo_list = sampling.split(",")

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds", dbif)
    sampler_sp = tgis.open_old_stds(sampler, type, dbif)

    if sampler_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        gcore.fatal(_("Input and aggregation dataset must have "
                      "the same temporal type"))

    # Check if intervals are present
    if sampler_sp.temporal_extent.get_map_time() != "interval":
        dbif.close()
        gcore.fatal(_("All registered maps of the aggregation dataset "
                      "must have time intervals"))

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    map_list = sp.get_registered_maps_as_objects(where=where, order="start_time", dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)

    granularity_list = sampler_sp.get_registered_maps_as_objects(where=where, order="start_time", dbif=dbif)

    if not granularity_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % sampler)

    gran = sampler_sp.get_granularity()

    output_list = tgis.aggregate_by_topology(
        granularity_list=granularity_list,
        granularity=gran,
        map_list=map_list,
        topo_list=topo_list,
        basename=base,
        time_suffix=time_suffix,
        offset=offset,
        method=method,
        nprocs=nprocs,
        spatial=None,
        overwrite=gcore.overwrite(),
    )

    if output_list:
        temporal_type, semantic_type, title, description = sp.get_initial_values()
        output_strds = tgis.open_new_stds(output, "strds", temporal_type,
                                          title, description, semantic_type,
                                          dbif, gcore.overwrite())
        tgis.register_map_object_list("rast", output_list, output_strds,
                                      register_null,
                                      sp.get_relative_time_unit(), dbif)

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
Example #5
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    use_raster_region = flags["r"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbors module
    neighbor_module = pymod.Module("r.neighbors",
                                   input="dummy",
                                   output="dummy",
                                   run_=False,
                                   finish_=False,
                                   size=int(size),
                                   method=method,
                                   overwrite=overwrite,
                                   quiet=True)

    gregion_module = pymod.Module(
        "g.region",
        raster="dummy",
        run_=False,
        finish_=False,
    )

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())

        if use_raster_region is True:
            reg = copy.deepcopy(gregion_module)
            reg(raster=map.get_id())
            print(reg.get_bash())
            print(mod.get_bash())
            mm = pymod.MultiModule([reg, mod],
                                   sync=False,
                                   set_temp_region=True)
            process_queue.put(mm)
        else:
            print(mod.get_bash())
            process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()
    proc_list = process_queue.get_finished_modules()

    # Check return status of all finished modules
    error = 0
    for proc in proc_list:
        if proc.popen.returncode != 0:
            grass.error(
                _("Error running module: %\n    stderr: %s") %
                (proc.get_bash(), proc.outputs.stderr))
            error += 1

    if error > 0:
        grass.fatal(_("Error running modules."))

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
Example #6
def main(options, flags):
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    nprocs = int(options["nprocs"])
    step = options["step"]
    levels = options["levels"]
    minlevel = options["minlevel"]
    maxlevel = options["maxlevel"]
    cut = options["cut"]
    time_suffix = options["suffix"]

    register_null = flags["n"]
    t_flag = flags["t"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output,
                                 "stvds",
                                 dbif=dbif,
                                 overwrite=overwrite)

    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"

    # Configure the r.contour module
    contour_module = pymod.Module("r.contour",
                                  input="dummy",
                                  output="dummy",
                                  run_=False,
                                  finish_=False,
                                  flags=flags,
                                  overwrite=overwrite,
                                  quiet=True)

    if step:
        contour_module.inputs.step = float(step)
    if minlevel:
        contour_module.inputs.minlevel = float(minlevel)
    if maxlevel:
        contour_module.inputs.maxlevel = float(maxlevel)
    if levels:
        contour_module.inputs.levels = levels.split(",")
    if cut:
        contour_module.inputs.cut = int(cut)

    # The module queue for parallel execution; fall back to a single
    # process if attribute tables are created
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(
                _("The number of parellel r.contour processes was "
                  "reduced to 1 because of the table attribute "
                  "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.contour on all selected maps
    for map in maps:
        count += 1

        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)
        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="vector",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(contour_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title, descr, stype,
                                dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        try:
            if map.load() is not True:
                continue
        except FatalError:
            continue
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove",
                            flags='f',
                            type='vector',
                            name=names,
                            quiet=True)

    dbif.close()
Example #7
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    register_null = flags["n"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the HANTS module
    hants_flags = ""
    if flags["l"]:
        hants_flags = hants_flags + 'l'
    if flags["h"]:
        hants_flags = hants_flags + 'h'
    if flags["i"]:
        hants_flags = hants_flags + 'i'

    kwargs = dict()
    kwargs['nf'] = options['nf']
    if options['fet']:
        kwargs['fet'] = options['fet']
    kwargs['dod'] = options['dod']
    if options['range']:
        kwargs['range'] = options['range']
    kwargs['suffix'] = "_hants"
    if len(hants_flags) > 0:
        kwargs['flags'] = hants_flags

    count = 0
    num_maps = len(maps)
    new_maps = []

    maplistfile = script.tempfile()
    f = open(maplistfile, 'w')

    # create list of input maps and their time stamps
    for map in maps:
        count += 1
        map_name = "{ba}_hants".format(ba=map.get_id())

        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        f.write("{0}\n".format(map.get_id()))

    f.close()

    # run r.hants
    grass.run_command('r.hants',
                      file=maplistfile,
                      quiet=True,
                      **kwargs)

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
Example #8
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    vector_output = options["vector_output"]
    strds = options["strds"]
    where = options["where"]
    columns = options["columns"]

    if where == "" or where == " " or where == "\n":
        where = None

    overwrite = grass.overwrite()

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")
    column_names = columns.split(",")

    if len(strds_names) != len(column_names):
        grass.fatal(_("The number of columns must be equal to the number of space time raster datasets"))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = grass.gisenv()["MAPSET"]

    out_sp = tgis.check_new_stds(output, "stvds", dbif, overwrite)

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)

    # Single space time raster dataset
    if len(strds_names) == 1:
        rows = first_strds.get_registered_maps(
            columns="name,mapset,start_time,end_time",
            order="start_time", dbif=dbif)

        if not rows:
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> is empty") %
                        out_sp.get_id())

        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [row["name"] + "@" + row["mapset"],]

            s = Sample(start, end, raster_maps)
            samples.append(s)
    else:
        # Multiple space time raster datasets
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(_("Temporal type of space time raster datasets must be equal\n"
                              "<%(a)s> of type %(type_a)s do not match <%(b)s> of type %(type_b)s"%
                              {"a":first_strds.get_id(),
                               "type_a":first_strds.get_temporal_type(),
                               "b":dataset.get_id(),
                               "type_b":dataset.get_temporal_type()}))

        mapmatrizes = tgis.sample_stds_by_stds_topology("strds", "strds", strds_names,
                                                      strds_names[0], False, None,
                                                      "equal", False, False)

        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:

                entry = mapmatrix[i]

                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)

            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]

                start, end = map.get_temporal_extent_as_tuple()
                s = Sample(start, end, mapname_list)
                samples.append(s)

    num_samples = len(samples)

    # Get the layer and database connections of the input vector
    vector_db = grass.vector.vector_db(input)

    # We copy the vector table and create the new layers
    if vector_db:
        # Use the first layer to copy the categories from
        layers = "1,"
    else:
        layers = ""
    first = True
    for layer in range(num_samples):
        layer += 1
        # Skip existing layer
        if vector_db and layer in vector_db and \
           vector_db[layer]["layer"] == layer:
            continue
        if first:
            layers += "%i" % (layer)
            first = False
        else:
            layers += ",%i" % (layer)

    vectmap = vector_output

    # We create a new vector map using the categories of the original map
    try:
        grass.run_command("v.category", input=input, layer=layers,
                          output=vectmap, option="transfer",
                          overwrite=overwrite)
    except CalledModuleError:
        grass.fatal(_("Unable to create new layers for vector map <%s>")
                    % (vectmap))

    title = _("Observaion of space time raster dataset(s) <%s>") % (strds)
    description= _("Observation of space time raster dataset(s) <%s>"
                   " with vector map <%s>") % (strds, input)

    # Create the output space time vector dataset
    out_sp = tgis.open_new_stds(output, "stvds",
                                              first_strds.get_temporal_type(),
                                              title, description,
                                              first_strds.get_semantic_type(),
                                              dbif, overwrite)

    dummy = out_sp.get_new_map_instance(None)

    # Sample the space time raster dataset with the vector
    # map at specific layer with v.what.rast
    count = 1
    for sample in samples:
        raster_names = sample.raster_names

        if len(raster_names) != len(column_names):
            grass.fatal(_("The number of raster maps in a granule must "
                          "be equal to the number of column names"))

        # Create the columns creation string
        columns_string = ""
        for name, column in zip(raster_names, column_names):
            # The column is by default double precision
            coltype = "DOUBLE PRECISION"
            # Get raster map type
            raster_map = tgis.RasterDataset(name)
            raster_map.load()
            if raster_map.metadata.get_datatype() == "CELL":
                coltype = "INT"

            tmp_string = "%s %s," %(column, coltype)
            columns_string += tmp_string

        # Remove last comma
        columns_string = columns_string[0:len(columns_string) - 1]

        # Try to add a column
        if vector_db and count in vector_db and vector_db[count]["table"]:
            try:
                grass.run_command("v.db.addcolumn", map=vectmap,
                                  layer=count, column=columns_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to add column %s to vector map <%s> "
                              "with layer %i") % (columns_string, vectmap, count))
        else:
            # Try to add a new table
            grass.message("Add table to layer %i" % (count))
            try:
                grass.run_command("v.db.addtable", map=vectmap, layer=count,
                                  columns=columns_string, overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to add table to vector map "
                              "<%s> with layer %i") % (vectmap, count))

        # Call v.what.rast for each raster map
        for name, column in zip(raster_names, column_names):
            try:
                grass.run_command("v.what.rast", map=vectmap,
                                  layer=count, raster=name,
                                  column=column, where=where)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to run v.what.rast for vector map <%s> "
                            "with layer %i and raster map <%s>") %
                            (vectmap, count, str(raster_names)))

        vect = out_sp.get_new_map_instance(dummy.build_id(vectmap,
                                                          mapset, str(count)))
        vect.load()

        start = sample.start
        end = sample.end

        if out_sp.is_time_absolute():
            vect.set_absolute_time(start, end)
        else:
            vect.set_relative_time(
                start, end, first_strds.get_relative_time_unit())

        if vect.is_in_db(dbif):
            vect.update_all(dbif)
        else:
            vect.insert(dbif)

        out_sp.register_map(vect, dbif)
        count += 1

    out_sp.update_from_registered_maps(dbif)
    dbif.close()
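
Examples #8 and #11 build Sample(start, end, raster_maps) objects and later read their start, end and raster_names attributes; the Sample class itself is defined elsewhere in the original script and is not shown here. A minimal container matching that usage could look like the sketch below (the attribute names are inferred from the code above, not taken from the original definition).

# Hedged sketch of the Sample container assumed by examples #8 and #11;
# the original script ships its own definition, this one only mirrors the
# attributes the examples access (start, end, raster_names).
class Sample(object):
    def __init__(self, start, end, raster_names=None):
        self.start = start
        self.end = end
        self.raster_names = raster_names if raster_names else []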
Example #9
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif,
                                               overwrite=overwrite)
    # Configure the r.neighbors module
    neighbor_module = pymod.Module("r.neighbors", input="dummy",
                                   output="dummy", run_=False,
                                   finish_=False, size=int(size),
                                   method=method, overwrite=overwrite,
                                   quiet=True)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(map.temporal_extent.get_start_time(),
                                                      sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(map_name, None, type="raster",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())
        print(mod.get_bash())
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title,
                                              descr, stype, dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count%10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove", flags='f', type='raster', name=names, quiet=True)

    dbif.close()
Example #10
def main(options, flags):

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    method = options["type"]
    nprocs = int(options["nprocs"])
    column = options["column"]
    time_suffix = options["suffix"]

    register_null = flags["n"]
    t_flag = flags["t"]
    s_flag = flags["s"]
    v_flag = flags["v"]
    b_flag = flags["b"]
    z_flag = flags["z"]
    
    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif,
                                 overwrite=overwrite)
                                               
    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"
    if s_flag is True:
        flags += "s"
    if v_flag is True:
        flags += "v"
    if b_flag is True:
        flags += "b"
    if z_flag is True:
        flags += "z"
    
    # Configure the r.to.vect module
    to_vector_module = pymod.Module("r.to.vect", input="dummy",
                                   output="dummy", run_=False,
                                   finish_=False, flags=flags,
                                   type=method, overwrite=overwrite,
                                   quiet=True)

    # The module queue for parallel execution; fall back to a single
    # process if attribute tables are created
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(_("The number of parellel r.to.vect processes was "\
                               "reduced to 1 because of the table attribute "\
                               "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.to.vect on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(map.temporal_extent.get_start_time(),
                                                      sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)
        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(to_vector_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count%10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title,
                                descr, stype, dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count%10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove", flags='f', type='vector', name=names, 
                            quiet=True)

    dbif.close()
Example #11
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    vector_output = options["vector_output"]
    strds = options["strds"]
    where = options["where"]
    columns = options["columns"]

    if where == "" or where == " " or where == "\n":
        where = None

    overwrite = grass.overwrite()

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")
    column_names = columns.split(",")

    if len(strds_names) != len(column_names):
        grass.fatal(_("The number of columns must be equal to the number of space time raster datasets"))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = grass.gisenv()["MAPSET"]

    out_sp = tgis.check_new_stds(output, "stvds", dbif, overwrite)

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)

    # Single space time raster dataset
    if len(strds_names) == 1:
        rows = first_strds.get_registered_maps(
            columns="name,mapset,start_time,end_time", 
            order="start_time", dbif=dbif)

        if not rows:
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> is empty") %
                        out_sp.get_id())

        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [row["name"] + "@" + row["mapset"],]

            s = Sample(start, end, raster_maps)
            samples.append(s)
    else:
        # Multiple space time raster datasets
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(_("Temporal type of space time raster datasets must be equal\n"
                              "<%(a)s> of type %(type_a)s do not match <%(b)s> of type %(type_b)s"%\
                              {"a":first_strds.get_id(),
                               "type_a":first_strds.get_temporal_type(),
                               "b":dataset.get_id(),
                               "type_b":dataset.get_temporal_type()}))

        mapmatrizes = tgis.sample_stds_by_stds_topology("strds", "strds", strds_names,
                                                      strds_names[0], False, None,
                                                      "equal", False, False)

        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:
                
                entry = mapmatrix[i]

                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)

            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]

                start, end = map.get_temporal_extent_as_tuple()
                s = Sample(start, end, mapname_list)
                samples.append(s)

    num_samples = len(samples)

    # Get the layer and database connections of the input vector
    vector_db = grass.vector.vector_db(input)

    # We copy the vector table and create the new layers
    if vector_db:
        # Use the first layer to copy the categories from
        layers = "1,"
    else:
        layers = ""
    first = True
    for layer in range(num_samples):
        layer += 1
        # Skip existing layer
        if vector_db and layer in vector_db and \
           vector_db[layer]["layer"] == layer:
            continue
        if first:
            layers += "%i" % (layer)
            first = False
        else:
            layers += ",%i" % (layer)

    vectmap = vector_output

    # We create a new vector map using the categories of the original map
    try:
        grass.run_command("v.category", input=input, layer=layers,
                          output=vectmap, option="transfer",
                          overwrite=overwrite)
    except CalledModuleError:
        grass.fatal(_("Unable to create new layers for vector map <%s>")
                    % (vectmap))

    title = _("Observaion of space time raster dataset(s) <%s>") % (strds)
    description= _("Observation of space time raster dataset(s) <%s>"
                   " with vector map <%s>") % (strds, input)

    # Create the output space time vector dataset
    out_sp = tgis.open_new_stds(output, "stvds",
                                              first_strds.get_temporal_type(),
                                              title, description,
                                              first_strds.get_semantic_type(),
                                              dbif, overwrite)

    dummy = out_sp.get_new_map_instance(None)

    # Sample the space time raster dataset with the vector
    # map at specific layer with v.what.rast
    count = 1
    for sample in samples:
        raster_names = sample.raster_names

        if len(raster_names) != len(column_names):
            grass.fatal(_("The number of raster maps in a granule must "
                          "be equal to the number of column names"))

        # Create the columns creation string
        columns_string = ""
        for name, column in zip(raster_names, column_names):
            # The column is by default double precision
            coltype = "DOUBLE PRECISION"
            # Get raster map type
            raster_map = tgis.RasterDataset(name)
            raster_map.load()
            if raster_map.metadata.get_datatype() == "CELL":
                coltype = "INT"

            tmp_string = "%s %s,"%(column, coltype)
            columns_string += tmp_string

        # Remove last comma
        columns_string = columns_string[0:len(columns_string) - 1]

        # Try to add a column
        if vector_db and count in vector_db and vector_db[count]["table"]:
            try:
                grass.run_command("v.db.addcolumn", map=vectmap,
                                  layer=count, column=columns_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to add column %s to vector map <%s> "
                              "with layer %i") % (columns_string, vectmap, count))
        else:
            # Try to add a new table
            grass.message("Add table to layer %i" % (count))
            try:
                grass.run_command("v.db.addtable", map=vectmap, layer=count,
                                  columns=columns_string, overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to add table to vector map "
                              "<%s> with layer %i") % (vectmap, count))

        # Call v.what.rast for each raster map
        for name, column in zip(raster_names, column_names):
            try:
                grass.run_command("v.what.rast", map=vectmap,
                                  layer=count, raster=name,
                                  column=column, where=where)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to run v.what.rast for vector map <%s> "
                            "with layer %i and raster map <%s>") % \
                            (vectmap, count, str(raster_names)))

        vect = out_sp.get_new_map_instance(dummy.build_id(vectmap,
                                                          mapset, str(count)))
        vect.load()
        
        start = sample.start
        end = sample.end
        
        if out_sp.is_time_absolute():
            vect.set_absolute_time(start, end)
        else:
            vect.set_relative_time(
                start, end, first_strds.get_relative_time_unit())

        if vect.is_in_db(dbif):
            vect.update_all(dbif)
        else:
            vect.insert(dbif)

        out_sp.register_map(vect, dbif)
        count += 1

    out_sp.update_from_registered_maps(dbif)
    dbif.close()
Example #12
def main(options, flags):

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    method = options["type"]
    nprocs = int(options["nprocs"])
    column = options["column"]

    register_null = flags["n"]
    t_flag = flags["t"]
    s_flag = flags["s"]
    v_flag = flags["v"]
    b_flag = flags["b"]
    z_flag = flags["z"]
    
    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif,
                                 overwrite=overwrite)
                                               
    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"
    if s_flag is True:
        flags += "s"
    if v_flag is True:
        flags += "v"
    if b_flag is True:
        flags += "b"
    if z_flag is True:
        flags += "z"
    
    # Configure the r.to.vect module
    to_vector_module = pymod.Module("r.to.vect", input="dummy",
                                   output="dummy", run_=False,
                                   finish_=False, flags=flags,
                                   type=method, overwrite=overwrite,
                                   quiet=True)

    # The module queue for parallel execution; fall back to a single
    # process if attribute tables are created
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(_("The number of parellel r.to.vect processes was "\
                               "reduced to 1 because of the table attribute "\
                               "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.to.vect on all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)
        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(to_vector_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count%10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title,
                                descr, stype, dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count%10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove", flags='f', type='vector', name=names, 
                            quiet=True)

    dbif.close()
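
Note (not part of the original example): main() above reads the global options/flags dictionaries and uses gscript, tgis, pymod and copy without showing where they come from. A minimal sketch of the scaffolding such GRASS temporal scripts rely on is given below; it is an assumption for illustration, and a real script additionally needs a "# %option" / "# %flag" comment header describing every option and flag that main() reads.

# Assumed scaffolding (illustration only, not taken from the original script)
import copy
import sys

import grass.script as gscript
import grass.temporal as tgis
import grass.pygrass.modules as pymod

if __name__ == "__main__":
    # gscript.parser() parses the "# %option"/"# %flag" header and fills
    # the global options/flags dictionaries that main() reads
    options, flags = gscript.parser()
    main()
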
Example #13
0
def main(options, flags):

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    nprocs = int(options["nprocs"])
    step = options["step"]
    levels = options["levels"]
    minlevel = options["minlevel"]
    maxlevel = options["maxlevel"]
    cut = options["cut"]

    register_null = flags["n"]
    t_flag = flags["t"]
    

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif,
                                 overwrite=overwrite)
                                               
    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"
    
    # Configure the r.contour module
    contour_module = pymod.Module("r.contour", input="dummy",
                                   output="dummy", run_=False,
                                   finish_=False, flags=flags,
                                   overwrite=overwrite,
                                   quiet=True)

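    # Pass through only the contour parameters that were actually given
    # (r.contour itself requires at least one of step/levels)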
    if step:
        contour_module.inputs.step = float(step)
    if minlevel:
        contour_module.inputs.minlevel = float(minlevel)
    if maxlevel:
        contour_module.inputs.maxlevel = float(maxlevel)
    if levels:
        contour_module.inputs.levels = levels.split(",")
    if cut:
        contour_module.inputs.cut = int(cut)

    # The module queue for parallel execution; fall back to a single process
    # if attribute tables have to be created
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(_("The number of parallel r.contour processes was "
                              "reduced to 1 because of the attribute table "
                              "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.contour on all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)
        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(contour_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title,
                                descr, stype, dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
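        # (map.load() returns False or raises FatalError, imported from
        # grass.exceptions, when r.contour produced no vector output at all;
        # such maps are skipped here)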
        try:
            if map.load() is not True:
                continue
        except FatalError:
            continue
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove", flags='f', type='vector', name=names, 
                            quiet=True)

    dbif.close()
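
Both conversion examples above use the same parallel-execution pattern: a pygrass Module template is created with run_=False and finish_=False, deep-copied and re-parameterised for every input map, and pushed into a ParallelModuleQueue. A stripped-down sketch of that pattern is shown below; the raster map names are hypothetical.

# Sketch of the deepcopy + ParallelModuleQueue pattern (hypothetical map names)
import copy

from grass.pygrass.modules import Module, ParallelModuleQueue

# template module: parameters are placeholders, execution is deferred
template = Module("r.contour", input="dummy", output="dummy", step=10.0,
                  run_=False, finish_=False, quiet=True)

# run at most 4 r.contour processes at the same time
queue = ParallelModuleQueue(4)

for name in ["temp_jan", "temp_feb", "temp_mar"]:
    mod = copy.deepcopy(template)
    # calling the copy only updates its parameters (run_=False)
    mod(input=name, output=name + "_contours")
    queue.put(mod)

# block until every queued process has finished
queue.wait()
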
Example #14
0
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    stdstype = options["type"]
    copy_maps = flags["c"]

    maptype = "raster"
    element = "cell"
    if stdstype == "str3ds":
        maptype = "raster_3d"
        element = "g3dcell"
    elif stdstype == "stvds":
        maptype = "vector"
        element = "vector"

    # Make sure the temporal database exists
    tgis.init()

    # Get the current mapset to create the id of the space time dataset
    mapset = gscript.gisenv()["MAPSET"]

    inname = input
    inmapset = mapset
    if "@" in input:
        inname, inmapset = input.split("@")

    outname = output
    outmapset = mapset
    if "@" in output:
        outname, outmapset = output.split("@")
        if outmapset != mapset:
            gscript.fatal(
                _("The output dataset <%s> must be in the current mapset <%s>.")
                % (output, mapset)
            )

    msgr = tgis.get_tgis_message_interface()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    old_sp = tgis.open_old_stds(input, stdstype, dbif)
    old_maps = old_sp.get_registered_maps_as_objects(dbif=dbif)

    if not old_maps:
        dbif.close()
        gscript.warning(
            _("Empty space-time %s dataset <%s>, nothing to copy") % (maptype, input)
        )
        return

    overwrite = gscript.overwrite()

    # Check the new stds
    new_sp = tgis.check_new_stds(output, stdstype, dbif, overwrite)

    new_maps = None
    if copy_maps:
        gscript.message(_("Copying %s maps to the current mapset...") % maptype)
        new_maps = []
        num_maps = len(old_maps)
        count = 0

        for map in old_maps:
            count += 1
            map_id = map.get_id()
            map_name = map_id
            map_mapset = mapset
            if "@" in map_id:
                map_name, map_mapset = map_id.split("@")

            if map_mapset != mapset:
                found = gscript.find_file(name=map_name, element=element, mapset=mapset)
                if found["name"] is not None and len(found["name"]) > 0:
                    gscript.fatal(
                        _("A %s map <%s> exists already in the current mapset <%s>.")
                        % (maptype, map_name, mapset)
                    )

                kwargs = {maptype: "%s,%s" % (map_id, map_name)}
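                # dict unpacking maps the type name to the g.copy parameter,
                # i.e. the call below is equivalent to e.g.
                # g.copy raster=<map_id>,<map_name>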
                gscript.run_command("g.copy", **kwargs)
            else:
                # the map is already in the current mapset
                gscript.message(
                    _("The %s map <%s> is already in the current mapset, not copying")
                    % (maptype, map_name)
                )

            if count % 10 == 0:
                msgr.percent(count, num_maps, 1)

            # We need to build the id
            if maptype != "vector":
                map_id = tgis.AbstractMapDataset.build_id(map_name, mapset)
            else:
                map_id = tgis.AbstractMapDataset.build_id(
                    map_name, mapset, map.get_layer()
                )

            new_map = tgis.open_new_map_dataset(
                map_name,
                None,
                type=maptype,  # match the dataset type (raster, raster_3d or vector)
                temporal_extent=map.get_temporal_extent(),
                overwrite=overwrite,
                dbif=dbif,
            )
            # semantic label
            semantic_label = map.metadata.get_semantic_label()
            if semantic_label is not None and semantic_label != "None":
                new_map.metadata.set_semantic_label(semantic_label)

            new_maps.append(new_map)
    else:
        # don't copy maps, use old maps
        new_maps = old_maps

    temporal_type, semantic_type, title, description = old_sp.get_initial_values()
    new_sp = tgis.open_new_stds(
        output,
        stdstype,
        old_sp.get_temporal_type(),
        title,
        description,
        semantic_type,
        dbif,
        gscript.overwrite(),
    )

    # Register the maps in the database
    num_maps = len(new_maps)
    count = 0
    for map in new_maps:
        count += 1

        # Insert map in temporal database
        if not map.is_in_db(dbif, mapset):
            semantic_label = map.metadata.get_semantic_label()
            map.load()
            # semantic labels are not yet properly implemented in TGIS
            if semantic_label is not None and semantic_label != "None":
                map.metadata.set_semantic_label(semantic_label)
            map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)

    dbif.close()
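
The build_id() calls in the copy example construct fully qualified temporal map IDs. A hedged illustration of what they are expected to return is shown below; the map, layer and mapset names are hypothetical.

# Illustration of tgis.AbstractMapDataset.build_id (hypothetical names)
import grass.temporal as tgis

# raster and 3D raster maps: "name@mapset"
rast_id = tgis.AbstractMapDataset.build_id("elev_2020", "user1")
# vector maps may carry a layer: "name:layer@mapset"
vect_id = tgis.AbstractMapDataset.build_id("roads", "user1", "1")
print(rast_id, vect_id)  # expected: elev_2020@user1 roads:1@user1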