Example #1
def main():
    # lazy imports
    import copy
    import grass.script as grass
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    use_raster_region = flags["r"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbors module
    neighbor_module = pymod.Module("r.neighbors",
                                   input="dummy",
                                   output="dummy",
                                   run_=False,
                                   finish_=False,
                                   size=int(size),
                                   method=method,
                                   overwrite=overwrite,
                                   quiet=True)

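    # g.region will be used to set the computational region to each input
    # raster map when the -r flag was given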
    gregion_module = pymod.Module(
        "g.region",
        raster="dummy",
        run_=False,
        finish_=False,
    )

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # Run r.neighbors on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())

        if use_raster_region is True:
            reg = copy.deepcopy(gregion_module)
            reg(raster=map.get_id())
            print(reg.get_bash())
            print(mod.get_bash())
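            # Chain g.region and r.neighbors so that each job runs in its
            # own temporary region set to the extent of its input map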
            mm = pymod.MultiModule([reg, mod],
                                   sync=False,
                                   set_temp_region=True)
            process_queue.put(mm)
        else:
            print(mod.get_bash())
            process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()
    proc_list = process_queue.get_finished_modules()

    # Check return status of all finished modules
    error = 0
    for proc in proc_list:
        if proc.popen.returncode != 0:
            grass.error(
                _("Error running module: %s\n    stderr: %s") %
                (proc.get_bash(), proc.outputs.stderr))
            error += 1

    if error > 0:
        grass.fatal(_("Error running modules."))

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ",".join(map.get_name() for map in empty_maps)
        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
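
# Note on the entry point: the main() above reads the global "options" and
# "flags" dictionaries. In a GRASS GIS Python script they are normally
# filled by grass.script.parser() from the "#% option" / "#% flag"
# definitions in the script header; a minimal sketch of the assumed entry
# point looks like this:

import grass.script as grass

if __name__ == "__main__":
    options, flags = grass.parser()
    main()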
Example #2
def main(options, flags):
    # lazy imports
    import copy
    import sys
    import grass.script as gscript
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod
    from grass.exceptions import FatalError

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    nprocs = int(options["nprocs"])
    step = options["step"]
    levels = options["levels"]
    minlevel = options["minlevel"]
    maxlevel = options["maxlevel"]
    cut = options["cut"]
    time_suffix = options["suffix"]

    register_null = flags["n"]
    t_flag = flags["t"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output,
                                 "stvds",
                                 dbif=dbif,
                                 overwrite=overwrite)

    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"

    # Configure the r.contour module
    contour_module = pymod.Module("r.contour",
                                  input="dummy",
                                  output="dummy",
                                  run_=False,
                                  finish_=False,
                                  flags=flags,
                                  overwrite=overwrite,
                                  quiet=True)

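    # Set the optional contour parameters only if they were given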
    if step:
        contour_module.inputs.step = float(step)
    if minlevel:
        contour_module.inputs.minlevel = float(minlevel)
    if maxlevel:
        contour_module.inputs.maxlevel = float(maxlevel)
    if levels:
        contour_module.inputs.levels = levels.split(",")
    if cut:
        contour_module.inputs.cut = int(cut)

    # The module queue for parallel execution. If attribute tables are
    # created, force single process use.
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(
                _("The number of parallel r.contour processes was "
                  "reduced to 1 because of the attribute table "
                  "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # Run r.contour on all selected maps
    for map in maps:
        count += 1

        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)
        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="vector",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(contour_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title, descr, stype,
                                dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
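        # Skip maps whose metadata cannot be loaded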
        try:
            if map.load() is not True:
                continue
        except FatalError:
            continue
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ",".join(map.get_name() for map in empty_maps)
        gscript.run_command("g.remove",
                            flags='f',
                            type='vector',
                            name=names,
                            quiet=True)

    dbif.close()
Example #3
def main():
    # lazy imports
    from copy import copy
    import grass.script as grass
    import grass.temporal as tgis
    from grass.pygrass.modules import Module

    # Get the options
    input = options["input"]
    output = options["output"]
    start = options["start"]
    stop = options["stop"]
    base = options["basename"]
    cycle = options["cycle"]
    lower = options["lower"]
    upper = options["upper"]
    offset = options["offset"]
    limits = options["limits"]
    shift = options["shift"]
    scale = options["scale"]
    method = options["method"]
    granularity = options["granularity"]
    register_null = flags["n"]
    reverse = flags["r"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = tgis.get_current_mapset()

    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    input_strds = tgis.SpaceTimeRasterDataset(id)

    if input_strds.is_in_db() == False:
        dbif.close()
        grass.fatal(_("Space time raster dataset <%s> not found") % (id))

    input_strds.select(dbif)

    if output.find("@") >= 0:
        out_id = output
    else:
        out_id = output + "@" + mapset

    # The output space time raster dataset
    output_strds = tgis.SpaceTimeRasterDataset(out_id)
    if output_strds.is_in_db(dbif):
        if not grass.overwrite():
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is already in the "
                  "database, use overwrite flag to overwrite") % out_id)

    if tgis.check_granularity_string(granularity,
                                     input_strds.get_temporal_type()) == False:
        dbif.close()
        grass.fatal(_("Invalid granularity"))

    if tgis.check_granularity_string(cycle,
                                     input_strds.get_temporal_type()) == False:
        dbif.close()
        grass.fatal(_("Invalid cycle"))

    if offset:
        if tgis.check_granularity_string(
                offset, input_strds.get_temporal_type()) == False:
            dbif.close()
            grass.fatal(_("Invalid offset"))

    # The lower threshold space time raster dataset
    if lower:
        if not limits:
            dbif.close()
            grass.fatal(
                _("You need to set the limits option to compute the"
                  " accumulation space time raster dataset"))

        if lower.find("@") >= 0:
            lower_id = lower
        else:
            lower_id = lower + "@" + mapset

        lower_strds = tgis.SpaceTimeRasterDataset(lower_id)
        if lower_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found") %
                (lower_strds.get_id()))

        if lower_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and lower strds must be equal")
            )

        lower_strds.select(dbif)

    # The upper threshold space time raster dataset
    if upper:
        if not lower:
            dbif.close()
            grass.fatal(
                _("The upper option works only in conjunction with the lower option"
                  ))

        if upper.find("@") >= 0:
            upper_id = upper
        else:
            upper_id = upper + "@" + mapset

        upper_strds = tgis.SpaceTimeRasterDataset(upper_id)
        if upper_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found") %
                (upper_strds.get_id()))

        if upper_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and upper strds must be equal")
            )

        upper_strds.select(dbif)

    input_strds_start, input_strds_end = \
        input_strds.get_temporal_extent_as_tuple()

    if input_strds.is_time_absolute():
        start = tgis.string_to_datetime(start)
        if stop:
            stop = tgis.string_to_datetime(stop)
        else:
            stop = input_strds_end
        start = tgis.adjust_datetime_to_granularity(start, granularity)
    else:
        start = int(start)
        if stop:
            stop = int(stop)
        else:
            stop = input_strds_end

    if input_strds.is_time_absolute():
        end = tgis.increment_datetime_by_string(start, cycle)
    else:
        end = start + cycle

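    # Temporal relations that are accepted when matching lower and upper
    # threshold maps to a granule of the current cycle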
    limit_relations = [
        "EQUALS", "DURING", "OVERLAPS", "OVERLAPPING", "CONTAINS"
    ]

    count = 1
    output_maps = []

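    # Process one cycle [start, end) at a time until the end of the input
    # dataset or the user defined stop time is reached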
    while input_strds_end > start and stop > start:

        # Make sure that the cyclic computation will stop at the correct time
        if stop and end > stop:
            end = stop

        where = "start_time >= \'%s\' AND start_time < \'%s\'" % (str(start),
                                                                  str(end))
        input_maps = input_strds.get_registered_maps_as_objects(where=where,
                                                                dbif=dbif)

        grass.message(_("Processing cycle %s - %s" % (str(start), str(end))))

        if len(input_maps) == 0:
            continue

        # Create a dummy list of maps with granularity-conform intervals
        gran_list = []
        gran_list_low = []
        gran_list_up = []
        gran_start = start
        while gran_start < end:
            map = input_strds.get_new_map_instance("%i@%i" % (count, count))
            if input_strds.is_time_absolute():
                gran_end = tgis.increment_datetime_by_string(
                    gran_start, granularity)
                map.set_absolute_time(gran_start, gran_end)
                gran_start = tgis.increment_datetime_by_string(
                    gran_start, granularity)
            else:
                gran_end = gran_start + granularity
                map.set_relative_time(gran_start, gran_end,
                                      input_strds.get_relative_time_unit())
                gran_start = gran_start + granularity
            gran_list.append(copy(map))
            gran_list_low.append(copy(map))
            gran_list_up.append(copy(map))
        # gran_list_low and gran_list_up are used to compute the topology
        # with the lower and upper threshold datasets

        # Create the topology between the granularity conform list and all maps
        # of the current cycle
        gran_topo = tgis.SpatioTemporalTopologyBuilder()
        gran_topo.build(gran_list, input_maps)

        if lower:
            lower_maps = lower_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_lower_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_lower_topo.build(gran_list_low, lower_maps)

        if upper:
            upper_maps = upper_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_upper_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_upper_topo.build(gran_list_up, upper_maps)

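        # The accumulation result of the previous granule is used as the
        # basemap of the next granule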
        old_map_name = None

        # Aggregate
        num_maps = len(gran_list)

        for i in range(num_maps):
            if reverse:
                map = gran_list[num_maps - i - 1]
            else:
                map = gran_list[i]
            # Select input maps based on temporal topology relations
            input_maps = []
            if map.get_equal():
                input_maps += map.get_equal()
            elif map.get_contains():
                input_maps += map.get_contains()
            elif map.get_overlaps():
                input_maps += map.get_overlaps()
            elif map.get_overlapped():
                input_maps += map.get_overlapped()
            elif map.get_during():
                input_maps += map.get_during()

            # Check input maps
            if len(input_maps) == 0:
                continue

            # New output map
            ttype = input_strds.get_temporal_type()
            if ttype == 'absolute' and time_suffix == 'gran':
                suffix = tgis.create_suffix_from_datetime(
                    map.temporal_extent.get_start_time(),
                    input_strds.get_granularity())
                output_map_name = "{ba}_{su}".format(ba=base, su=suffix)
            elif ttype == 'absolute' and time_suffix == 'time':
                suffix = tgis.create_time_suffix(map)
                output_map_name = "{ba}_{su}".format(ba=base, su=suffix)
            else:
                output_map_name = tgis.create_numeric_suffix(
                    base, count, time_suffix)

            output_map_id = map.build_id(output_map_name, mapset)
            output_map = input_strds.get_new_map_instance(output_map_id)

            # Check if new map is in the temporal database
            if output_map.is_in_db(dbif):
                if grass.overwrite():
                    # Remove the existing temporal database entry
                    output_map.delete(dbif)
                    output_map = input_strds.get_new_map_instance(
                        output_map_id)
                else:
                    grass.fatal(
                        _("Map <%s> is already registered in the temporal"
                          " database, use overwrite flag to overwrite.") %
                        (output_map.get_map_id()))

            map_start, map_end = map.get_temporal_extent_as_tuple()

            if map.is_time_absolute():
                output_map.set_absolute_time(map_start, map_end)
            else:
                output_map.set_relative_time(map_start, map_end,
                                             map.get_relative_time_unit())

            limits_vals = limits.split(",")
            limits_lower = float(limits_vals[0])
            limits_upper = float(limits_vals[1])

            lower_map_name = None
            if lower:
                relations = gran_list_low[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        lower_map_name = str(relations[relation][0].get_id())
                        break

            upper_map_name = None
            if upper:
                relations = gran_list_up[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        upper_map_name = str(relations[relation][0].get_id())
                        break

            input_map_names = []
            for input_map in input_maps:
                input_map_names.append(input_map.get_id())

            # Set up the module
            accmod = Module("r.series.accumulate",
                            input=input_map_names,
                            output=output_map_name,
                            run_=False)

            if old_map_name:
                accmod.inputs["basemap"].value = old_map_name
            if lower_map_name:
                accmod.inputs["lower"].value = lower_map_name
            if upper_map_name:
                accmod.inputs["upper"].value = upper_map_name

            accmod.inputs["limits"].value = (limits_lower, limits_upper)

            if shift:
                accmod.inputs["shift"].value = float(shift)

            if scale:
                accmod.inputs["scale"].value = float(scale)

            if method:
                accmod.inputs["method"].value = method

            print(accmod)
            accmod.run()

            if accmod.popen.returncode != 0:
                dbif.close()
                grass.fatal(_("Error running r.series.accumulate"))

            output_maps.append(output_map)
            old_map_name = output_map_name
            count += 1

        # Increment the cycle
        start = end
        if input_strds.is_time_absolute():
            start = end
            if offset:
                start = tgis.increment_datetime_by_string(end, offset)

            end = tgis.increment_datetime_by_string(start, cycle)
        else:
            if offset:
                start = end + offset
            end = start + cycle

    # Insert the maps into the output space time dataset
    if output_strds.is_in_db(dbif):
        if grass.overwrite():
            output_strds.delete(dbif)
            output_strds = input_strds.get_new_instance(out_id)

    temporal_type, semantic_type, title, description = \
        input_strds.get_initial_values()
    output_strds.set_initial_values(temporal_type, semantic_type, title,
                                    description)
    output_strds.insert(dbif)

    empty_maps = []
    # Register the maps in the database
    count = 0
    for output_map in output_maps:
        count += 1
        if count % 10 == 0:
            grass.percent(count, len(output_maps), 1)
        # Read the raster map data
        output_map.load()
        # In case of an empty map, continue; do not register empty maps

        if not register_null:
            if output_map.metadata.get_min() is None and \
                output_map.metadata.get_max() is None:
                empty_maps.append(output_map)
                continue

        # Insert map in temporal database
        output_map.insert(dbif)
        output_strds.register_map(output_map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    output_strds.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    dbif.close()

    # Remove empty maps
    if len(empty_maps) > 0:
        for map in empty_maps:
            grass.run_command("g.remove",
                              flags='f',
                              type="raster",
                              name=map.get_name(),
                              quiet=True)
Example #4
def main():
    # lazy imports
    import copy
    import sys
    import grass.script as grass
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    base = options["basename"]
    where = options["where"]
    nprocs = options["nprocs"]
    tsuffix = options["suffix"]

    mapset = grass.encode(grass.gisenv()["MAPSET"])

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds")

    maps = sp.get_registered_maps_as_objects_with_gaps(where, dbif)

    num = len(maps)

    # Configure the r.series.interp module
    gapfill_module = pymod.Module(
        "r.series.interp",
        overwrite=grass.overwrite(),
        quiet=True,
        run_=False,
        finish_=False,
    )

    process_queue = pymod.ParallelModuleQueue(int(nprocs))

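    # Gaps to fill and a record of which new map ids already exist in the
    # temporal database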
    gap_list = []
    overwrite_flags = {}

    # Identify all gaps and create new names
    count = 0
    for _map in maps:
        if _map.get_id() is None:
            count += 1
            if sp.get_temporal_type() == 'absolute' and tsuffix in [
                    'gran', 'time'
            ]:
                _id = "{ba}@{ma}".format(ba=base, ma=mapset)
            else:
                map_name = tgis.create_numeric_suffix(base, num + count,
                                                      tsuffix)
                _id = "{name}@{ma}".format(name=map_name, ma=mapset)
            _map.set_id(_id)

            gap_list.append(_map)

    if len(gap_list) == 0:
        grass.message(_("No gaps found"))
        return

    # Build the temporal topology
    tb = tgis.SpatioTemporalTopologyBuilder()
    tb.build(maps)

    # Do some checks before computation
    for _map in gap_list:
        if not _map.get_precedes() or not _map.get_follows():
            grass.fatal(
                _("Unable to determine successor "
                  "and predecessor of a gap."))

        if len(_map.get_precedes()) > 1:
            grass.warning(
                _("More than one successor of the gap found. "
                  "Using the first found."))

        if len(_map.get_follows()) > 1:
            grass.warning(
                _("More than one predecessor of the gap found. "
                  "Using the first found."))

    # Interpolate the maps using parallel processing
    result_list = []

    for _map in gap_list:
        predecessor = _map.get_follows()[0]
        successor = _map.get_precedes()[0]

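        # The gap extends from the end of its predecessor to the start of
        # its successor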
        gran = sp.get_granularity()
        tmpval, start = predecessor.get_temporal_extent_as_tuple()
        end, tmpval = successor.get_temporal_extent_as_tuple()

        # Now resample the gap
        map_matrix = tgis.AbstractSpaceTimeDataset.resample_maplist_by_granularity(
            (_map, ), start, end, gran)

        map_names = []
        map_positions = []

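        # Distribute the sampling positions of the new maps evenly between
        # the predecessor (position 0) and the successor (position 1)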
        increment = 1.0 / (len(map_matrix) + 1.0)
        position = increment
        count = 0
        for intp_list in map_matrix:
            new_map = intp_list[0]
            count += 1
            if sp.get_temporal_type() == 'absolute' and tsuffix == 'gran':
                suffix = tgis.create_suffix_from_datetime(
                    new_map.temporal_extent.get_start_time(),
                    sp.get_granularity())
                new_id = "{ba}_{su}@{ma}".format(ba=new_map.get_name(),
                                                 su=suffix,
                                                 ma=mapset)
            elif sp.get_temporal_type() == 'absolute' and tsuffix == 'time':
                suffix = tgis.create_time_suffix(new_map)
                new_id = "{ba}_{su}@{ma}".format(ba=new_map.get_name(),
                                                 su=suffix,
                                                 ma=mapset)
            else:
                map_name = tgis.create_numeric_suffix(new_map.get_name(),
                                                      count, tsuffix)
                new_id = "{name}@{ma}".format(name=map_name, ma=mapset)

            new_map.set_id(new_id)

            overwrite_flags[new_id] = False
            if new_map.map_exists() or new_map.is_in_db(dbif):
                if not grass.overwrite():
                    grass.fatal(
                        _("Map with name <%s> already exists. "
                          "Please use another base name.") % new_id)
                else:
                    if new_map.is_in_db(dbif):
                        overwrite_flags[new_id] = True

            map_names.append(new_map.get_name())
            map_positions.append(position)
            position += increment

            result_list.append(new_map)

        mod = copy.deepcopy(gapfill_module)
        mod(input=(predecessor.get_map_id(), successor.get_map_id()),
            datapos=(0, 1),
            output=map_names,
            samplingpos=map_positions)
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Insert new interpolated maps in temporal database and dataset
    for _map in result_list:
        id = _map.get_id()
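        # Maps that already exist in the temporal database are deleted and
        # re-created with their original temporal extent before registration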
        if overwrite_flags[id] == True:
            if _map.is_time_absolute():
                start, end = _map.get_absolute_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(id)
                _map.set_absolute_time(start, end)
            else:
                start, end, unit = _map.get_relative_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(id)
                _map.set_relative_time(start, end, unit)
        _map.load()
        _map.insert(dbif)
        sp.register_map(_map, dbif)

    sp.update_from_registered_maps(dbif)
    sp.update_command_string(dbif=dbif)
    dbif.close()