Example #1
    def p_buffer_operation(self, t):
        """
        expression : buff_function LPAREN name COMMA number RPAREN
                   | buff_function LPAREN expression COMMA number RPAREN
        """
        # Generate an intermediate name
        name = self.generate_vector_map_name()

        # Assign ids to expressions, names and operators.
        mapid = 3
        operatorid = 5

        if t[1] == "buff_p":
            if self.debug:
                print("v.buffer input=%s type=point distance=%g output=%s" %
                      (t[mapid], t[operatorid], name))

            if self.run:
                m = mod.Module(
                    "v.buffer",
                    type="point",
                    input=t[mapid],
                    distance=float(t[operatorid]),
                    output=name,
                    run_=False,
                )
                self.cmdlist.add_cmd(m)
            t[0] = name
        elif t[1] == "buff_l":
            if self.debug:
                print("v.buffer input=%s type=line distance=%g output=%s" %
                      (t[mapid], t[operatorid], name))

            if self.run:
                m = mod.Module(
                    "v.buffer",
                    type="line",
                    input=t[mapid],
                    distance=float(t[operatorid]),
                    output=name,
                    run_=False,
                )
                self.cmdlist.add_cmd(m)
            t[0] = name
        elif t[1] == "buff_a":
            if self.debug:
                print("v.buffer input=%s type=area distance=%g output=%s" %
                      (t[mapid], t[operatorid], name))

            if self.run:
                m = mod.Module(
                    "v.buffer",
                    type="area",
                    input=t[mapid],
                    distance=float(t[operatorid]),
                    output=name,
                    run_=False,
                )
                self.cmdlist.add_cmd(m)
            t[0] = name
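
The v.buffer calls above are only queued: each pygrass Module is created with run_=False and stored via self.cmdlist.add_cmd() for later execution. A minimal sketch of that deferred-execution pattern outside the parser, using a hypothetical CommandList helper and assumed map names:

from grass.pygrass.modules import Module

class CommandList:
    """Hypothetical stand-in for the parser's self.cmdlist."""
    def __init__(self):
        self.cmds = []

    def add_cmd(self, cmd):
        self.cmds.append(cmd)

    def run_all(self):
        for cmd in self.cmds:
            cmd.run()  # execute the deferred module

cmds = CommandList()
# assumed map names; any existing line vector map would do
cmds.add_cmd(Module("v.buffer", type="line", input="roads",
                    distance=100.0, output="roads_buf_100", run_=False))
cmds.run_all()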
Example #2
    def __init__(
        self,
        pid=None,
        run=False,
        debug=True,
        spatial=False,
        register_null=False,
        dry_run=False,
        nprocs=1,
        time_suffix=None,
    ):

        TemporalRasterBaseAlgebraParser.__init__(
            self,
            pid=pid,
            run=run,
            debug=debug,
            spatial=spatial,
            register_null=register_null,
            dry_run=dry_run,
            nprocs=nprocs,
            time_suffix=time_suffix,
        )

        if spatial is True:
            self.m_mapcalc = pymod.Module("r.mapcalc", region="union", run_=False)
        else:
            self.m_mapcalc = pymod.Module("r.mapcalc")
        self.m_mremove = pymod.Module("g.remove")
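
The modules created in this constructor are templates: only the module name (plus run_=False for the spatial case) is given, so nothing is executed at construction time. A minimal sketch of how such a template is typically copied, parameterized and run later (the expression and map names are assumptions):

import copy
import grass.pygrass.modules as pymod

# template as in __init__ above; not executed at construction time
m_mapcalc = pymod.Module("r.mapcalc", region="union", run_=False)

calc = copy.deepcopy(m_mapcalc)             # keep the template untouched
calc(expression="result = map_a + map_b")   # fill in parameters only
calc.run()                                  # execute the deferred command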
Example #3
    def __init__(self, pid=None, run=False, debug=True, spatial=False):
        TemporalAlgebraParser.__init__(self, pid, run, debug, spatial)

        self.m_overlay = pygrass.Module("v.overlay", quiet=True, run_=False)
        self.m_rename = pygrass.Module("g.rename", quiet=True, run_=False)
        self.m_patch = pygrass.Module("v.patch", quiet=True, run_=False)
        self.m_mremove = pygrass.Module("g.remove", quiet=True, run_=False)
        self.m_buffer = pygrass.Module("v.buffer", quiet=True, run_=False)
Example #4
    def remove_intermediate_vector_maps(self):
        if self.debug:
            for name in self.names:
                print("g.remove type=vector name=%s -f" % (name))
        if self.run:
            for name in self.names:
                m = mod.Module('g.remove', type='vector', name=name,
                               flags='f', run_=False)
                self.cmdlist.add_cmd(m)
Example #5
    def g_region(self):
        """" To Validate the Output """

        # Configure a g.region raster='elevation' test

        module = gmodules.Module("g.region", raster="elevation")

        self.run_module(module=module)

        self.assertStdout(actual=module.stdout, reference="r_info_g.ref")
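
run_module() and assertStdout() here come from the test case this method lives in; the snippet does not show the surrounding class. A minimal sketch of a standard grass.gunittest harness for a comparable check (class name, the use of SimpleModule and the assertion are assumptions, not the helpers used above):

from grass.gunittest.case import TestCase
from grass.gunittest.main import test
from grass.gunittest.gmodules import SimpleModule


class TestRegion(TestCase):
    def test_g_region_output(self):
        # SimpleModule captures stdout so it can be inspected afterwards
        module = SimpleModule("g.region", flags="p")
        self.runModule(module)
        self.assertIn("rows", module.outputs.stdout)


if __name__ == "__main__":
    test()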
Example #6
    def __init__(self,
                 pid=None,
                 run=False,
                 debug=True,
                 spatial=False,
                 register_null=False,
                 dry_run=False,
                 nprocs=1):

        TemporalRasterBaseAlgebraParser.__init__(self,
                                                 pid=pid,
                                                 run=run,
                                                 debug=debug,
                                                 spatial=spatial,
                                                 register_null=register_null,
                                                 dry_run=dry_run,
                                                 nprocs=nprocs)

        self.m_mapcalc = pymod.Module('r3.mapcalc')
        self.m_mremove = pymod.Module('g.remove')
Example #7
    def test_flag_ui(self):
        """Test to validate the output of r.flip using flag "g"
        """
        # Configure a r.flip flag="ui" test
        # Force launching GUI dialog
        module = gmodules.Module("r.flip",
                                 input='test',
                                 output='test',
                                 flags="ui")

        self.run_module(module=module)
        # it is not clear where to store stdout and stderr
        self.assertStdout(actual=module.stdout, reference="r_info_g.ref")
Example #8
    def test_flag_overwrite(self):
        """Test to validate the output of r.flip using flag "g"
        """
        # Configure a r.flip flag= "overwrite" test
        # Allow output files to overwrite existing files
        module = gmodules.Module("r.flip",
                                 input='test',
                                 output='test',
                                 flags="overwrite")

        self.run_module(module=module)
        # it is not clear where to store stdout and stderr
        self.assertStdout(actual=module.stdout, reference="r_info_g.ref")
Example #9
    def test_flag_g(self):
        """Test to validate the output of r.flip using flag "g"
        """
        # Configure a r.flip flag= "help" test
        # Print usage summary
        module = gmodules.Module("r.flip",
                                 input='test',
                                 output='test',
                                 flags="help")

        self.run_module(module=module)
        # it is not clear where to store stdout and stderr
        self.assertStdout(actual=module.stdout, reference="r_info_g.ref")
Example #10
        def test_flag_verbose(self):
            # Configure a r.gradient flag="verbose" test
            # North-South direction
            module = gmodules.Module("r.gradient",
                                     output='test',
                                     direction='N-S',
                                     range='10, 20',
                                     flags="verbose")

            self.run_module(module=module)
            # it is not clear where to store stdout and stderr
            self.assertStdout(actual=module.stdout, reference="r_info_g.ref")
Example #11
    def test_flag_verbose(self):
        # Configure a r.stream.slope flag="verbose" test
        module = gmodules.Module("r.stream.slope",
                                 direction='dir',
                                 elevation="elevation",
                                 gradient='downstream_gradient',
                                 maxcurv='downstream_maxcurv',
                                 mincurv='downstream_mincurv',
                                 flags="verbose")

        self.run_module(module=module)
        # it is not clear where to store stdout and stderr
        self.assertStdout(actual=module.stdout, reference="r_info_g.ref")
Example #12
        def test_flag_ui(self):
            """Test to validate the output"""
            # Configure a r.gradient flag="ui" test
            # North-East to South-West direction
            module = gmodules.Module("r.gradient",
                                     output='test',
                                     direction='NE-SW',
                                     range='10, 20',
                                     flags="ui")

            self.run_module(module=module)
            # it is not clear where to store stdout and stderr
            self.assertStdout(actual=module.stdout, reference="r_info_g.ref")
Example #13
        def test_flag_help(self):
            """Test to validate the output"""
            # Configure a r.gradient flag="help" test
            # South-North direction
            module = gmodules.Module("r.gradient",
                                     output='test',
                                     direction='S-N',
                                     range='10, 20',
                                     flags="help")

            self.run_module(module=module)
            # it is not clear where to store stdout and stderr
            self.assertStdout(actual=module.stdout, reference="r_info_g.ref")
Example #14
    def p_statement_assign(self, t):
        """
        statement : NAME EQUALS expression
                  | NAME EQUALS name
                  | NAME EQUALS paren_name
        """
        # Remove the result map from the intermediate-name list so it is not deleted later
        if t[3] in self.names:
            self.names.pop(t[3])

        # We rename the resulting vector map
        if self.debug:
            print("g.rename vector=%s,%s" % (t[3], t[1]))

        if self.run:
            m = mod.Module('g.rename', vector=(t[3], t[1]),
                           overwrite=grass.overwrite(), run_=False)
            self.cmdlist.add_cmd(m)
        self.remove_intermediate_vector_maps()
Example #15
def main():
    strds = options["input"]
    where = options["where"]
    nprocs = int(options["nprocs"])

    nullmod = pymod.Module("r.null")
    nullmod.flags.quiet = True
    if options["null"]:
        nullmod.inputs.null = options["null"]
    elif options["setnull"]:
        nullmod.inputs.setnull = options["setnull"]
    else:
        gscript.fatal(_("Please set 'null' or 'setnull' option"))

    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where, "start_time", None)
    if maps is None:
        gscript.fatal(
            _("Space time raster dataset {st} seems to be "
              "empty").format(st=strds))
    # module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)

    for mapp in maps:
        count += 1
        mod = copy.deepcopy(nullmod)
        mod.inputs.map = mapp.get_id()
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()
Example #16
    def p_bool_and_operation(self, t):
        """
        expression : name AND name
                   | expression AND name
                   | name AND expression
                   | expression AND expression
                   | name OR name
                   | expression OR name
                   | name OR expression
                   | expression OR expression
                   | name XOR name
                   | expression XOR name
                   | name XOR expression
                   | expression XOR expression
                   | name NOT name
                   | expression NOT name
                   | name NOT expression
                   | expression NOT expression
                   | name DISOR name
                   | expression DISOR name
                   | name DISOR expression
                   | expression DISOR expression
        """

        # Generate an intermediate name
        name = self.generate_vector_map_name()

        # Assign ids to expressions, names and operators.
        firstid = 1
        secondid = 3
        operatorid = 2

        # Define operation commands.
        if t[operatorid] == "&":
            if self.debug:
                print("v.overlay operator=and ainput=%s binput=%s output=%s" %
                      (t[firstid], t[secondid], name))

            if self.run:
                m = mod.Module(
                    "v.overlay",
                    operator="and",
                    ainput=t[firstid],
                    binput=t[secondid],
                    output=name,
                    run_=False,
                )
                self.cmdlist.add_cmd(m)
            t[0] = name

        elif t[operatorid] == "|":
            if self.debug:
                print("v.overlay operator=or ainput=%s binput=%s output=%s" %
                      (t[firstid], t[secondid], name))

            if self.run:
                m = mod.Module(
                    "v.overlay",
                    operator="or",
                    ainput=t[firstid],
                    binput=t[secondid],
                    output=name,
                    run_=False,
                )
                self.cmdlist.add_cmd(m)
            t[0] = name

        elif t[operatorid] == "^":
            if self.debug:
                print("v.overlay operator=xor ainput=%s binput=%s output=%s" %
                      (t[firstid], t[secondid], name))

            if self.run:
                m = mod.Module(
                    "v.overlay",
                    operator="xor",
                    ainput=t[firstid],
                    binput=t[secondid],
                    output=name,
                    run_=False,
                )
                self.cmdlist.add_cmd(m)
            t[0] = name

        elif t[operatorid] == "~":
            if self.debug:
                print("v.overlay operator=not ainput=%s binput=%s output=%s" %
                      (t[firstid], t[secondid], name))

            if self.run:
                m = mod.Module(
                    "v.overlay",
                    operator="not",
                    ainput=t[firstid],
                    binput=t[secondid],
                    output=name,
                    run_=False,
                )
                self.cmdlist.add_cmd(m)
            t[0] = name

        elif t[operatorid] == "+":
            patchinput = t[firstid] + "," + t[secondid]
            if self.debug:
                print("v.patch input=%s output=%s" % (patchinput, name))

            if self.run:
                m = mod.Module("v.patch",
                               input=patchinput,
                               output=name,
                               run_=False)
                self.cmdlist.add_cmd(m)
            t[0] = name
Example #17
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    nprocs = options["nprocs"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbors module
    neighbor_module = pymod.Module("r.neighbors",
                                   input="dummy",
                                   output="dummy",
                                   run_=False,
                                   finish_=False,
                                   size=int(size),
                                   method=method,
                                   overwrite=overwrite,
                                   quiet=True)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # Run r.neighbors on all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)
        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())
        print(mod.get_bash())
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
Example #18
def main(options, flags):
    import grass.pygrass.modules as pymod
    import grass.temporal as tgis
    from grass.pygrass.vector import VectorTopo

    invect = options["input"]
    if invect.find("@") != -1:
        invect = invect.split("@")[0]
    incol = options["date_column"]
    indate = options["date"]
    endcol = options["final_date_column"]
    enddate = options["final_date"]
    strds = options["strds"]
    nprocs = options["nprocs"]
    if strds.find("@") != -1:
        strds_name = strds.split("@")[0]
    else:
        strds_name = strds
    output = options["output"]
    if options["columns"]:
        cols = options["columns"].split(",")
    else:
        cols = []
    mets = options["method"].split(",")
    gran = options["granularity"]
    dateformat = options["date_format"]
    separator = gscript.separator(options["separator"])
    update = flags["u"]
    create = flags["c"]

    stdout = False
    if output != "-" and update:
        gscript.fatal(_("Cannot combine 'output' option and 'u' flag"))
    elif output != "-" and create:
        gscript.fatal(_("Cannot combine 'output' option and 'c' flag"))
    elif output == "-" and (update or create):
        if update and not cols:
            gscript.fatal(_("Please set 'columns' option"))
        output = invect
    else:
        stdout = True

    if create:
        cols = []
        for m in mets:
            colname = "{st}_{me}".format(st=strds_name, me=m)
            cols.append(colname)
            try:
                pymod.Module(
                    "v.db.addcolumn",
                    map=invect,
                    columns="{col} "
                    "double precision".format(col=colname),
                )
            except CalledModuleError:
                gscript.fatal(
                    _("Not possible to create column "
                      "{col}".format(col=colname)))
        gscript.warning(
            _("Attribute table of vector {name} will be updated"
              "...").format(name=invect))
    elif update:
        colexist = pymod.Module("db.columns", table=invect,
                                stdout_=PI).outputs.stdout.splitlines()
        for col in cols:
            if col not in colexist:
                gscript.fatal(
                    _("Column '{}' does not exist, please create it first".
                      format(col)))
        gscript.warning(
            _("Attribute table of vector {name} will be updated"
              "...").format(name=invect))

    if output != "-" and len(cols) != len(mets):
        gscript.fatal(
            _("'columns' and 'method' options must have the same "
              "number of elements"))
    tgis.init()
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()
    sp = tgis.open_old_stds(strds, "strds", dbif)

    if sp.get_temporal_type() == "absolute":
        if gran:
            delta = int(tgis.gran_to_gran(gran, sp.get_granularity(), True))
            if tgis.gran_singular_unit(gran) in ["year", "month"]:
                delta = int(tgis.gran_to_gran(gran, "1 day", True))
                td = timedelta(delta)
            elif tgis.gran_singular_unit(gran) == "day":
                delta = tgis.gran_to_gran(gran, sp.get_granularity(), True)
                td = timedelta(delta)
            elif tgis.gran_singular_unit(gran) == "hour":
                td = timedelta(hours=delta)
            elif tgis.gran_singular_unit(gran) == "minute":
                td = timedelta(minutes=delta)
            elif tgis.gran_singular_unit(gran) == "second":
                td = timedelta(seconds=delta)
        else:
            td = None
    else:
        if sp.get_granularity() >= int(gran):
            gscript.fatal(
                _("Input granularity is smaller than or equal to the {iv}"
                  " STRDS granularity").format(iv=strds))
        td = int(gran)
    if incol and indate:
        gscript.fatal(_("Cannot combine 'date_column' and 'date' options"))
    elif not incol and not indate:
        gscript.fatal(_("You have to fill 'date_column' or 'date' option"))
    if incol:
        if endcol:
            mysql = "SELECT DISTINCT {dc},{ec} from {vmap} order by " "{dc}".format(
                vmap=invect, dc=incol, ec=endcol)
        else:
            mysql = "SELECT DISTINCT {dc} from {vmap} order by " "{dc}".format(
                vmap=invect, dc=incol)
        try:
            dates = pymod.Module("db.select",
                                 flags="c",
                                 stdout_=PI,
                                 stderr_=PI,
                                 sql=mysql)
            mydates = dates.outputs["stdout"].value.splitlines()
        except CalledModuleError:
            gscript.fatal(_("db.select return an error"))
    elif indate:
        if enddate:
            mydates = ["{ida}|{eda}".format(ida=indate, eda=enddate)]
        else:
            mydates = [indate]
        pymap = VectorTopo(invect)
        pymap.open("r")
        if len(pymap.dblinks) == 0:
            try:
                pymap.close()
                pymod.Module("v.db.addtable", map=invect)
            except CalledModuleError:
                dbif.close()
                gscript.fatal(
                    _("Unable to add table to vector map "
                      "<%s>") % invect)
        if pymap.is_open():
            pymap.close()
        qfeat = pymod.Module("v.category",
                             stdout_=PI,
                             stderr_=PI,
                             input=invect,
                             option="print")
        myfeats = qfeat.outputs["stdout"].value.splitlines()

    if stdout:
        outtxt = ""
    for data in mydates:
        try:
            start, final = data.split("|")
        except ValueError:
            start = data
            final = None
        if sp.get_temporal_type() == "absolute":
            fdata = datetime.strptime(start, dateformat)
        else:
            fdata = int(start)
        if final:
            sdata = datetime.strptime(final, dateformat)
        elif flags["a"]:
            sdata = fdata + td
        else:
            sdata = fdata
            fdata = sdata - td
        mwhere = "start_time >= '{inn}' and start_time < " "'{out}'".format(
            inn=fdata, out=sdata)
        lines = None
        try:
            r_what = pymod.Module(
                "t.rast.what",
                points=invect,
                strds=strds,
                layout="timerow",
                separator=separator,
                flags="v",
                where=mwhere,
                quiet=True,
                stdout_=PI,
                stderr_=PI,
                nprocs=nprocs,
            )
            lines = r_what.outputs["stdout"].value.splitlines()
        except CalledModuleError:
            gscript.warning("t.rast.what faild with where='{}'".format(mwhere))
            pass
        if incol:
            if endcol:
                mysql = ("SELECT DISTINCT cat from {vmap} where {dc}='{da}' "
                         "AND {ec}='{ed}' order by cat".format(vmap=invect,
                                                               da=start,
                                                               dc=incol,
                                                               ed=final,
                                                               ec=endcol))
            else:
                mysql = ("SELECT DISTINCT cat from {vmap} where {dc}='{da}' "
                         "order by cat".format(vmap=invect, da=start,
                                               dc=incol))
            try:
                qfeat = pymod.Module("db.select",
                                     flags="c",
                                     stdout_=PI,
                                     stderr_=PI,
                                     sql=mysql)
                myfeats = qfeat.outputs["stdout"].value.splitlines()
            except CalledModuleError:
                gscript.fatal(
                    _("db.select returned an error for date "
                      "{da}".format(da=start)))
        if not lines and stdout:
            for feat in myfeats:
                outtxt += "{di}{sep}{da}".format(di=feat,
                                                 da=start,
                                                 sep=separator)
                for n in range(len(mets)):
                    outtxt += "{sep}{val}".format(val="*", sep=separator)
                outtxt += "\n"
        if not lines:
            continue
        x = 0
        for line in lines:
            vals = line.split(separator)
            if vals[0] in myfeats:
                try:
                    nvals = np.array(vals[3:]).astype(float)
                except ValueError:
                    if stdout:
                        outtxt += "{di}{sep}{da}".format(di=vals[0],
                                                         da=start,
                                                         sep=separator)
                        for n in range(len(mets)):
                            outtxt += "{sep}{val}".format(val="*",
                                                          sep=separator)
                        outtxt += "\n"
                    continue
                if stdout:
                    outtxt += "{di}{sep}{da}".format(di=vals[0],
                                                     da=start,
                                                     sep=separator)
                for n in range(len(mets)):
                    result = None
                    if len(nvals) == 1:
                        result = nvals[0]
                    elif len(nvals) > 1:
                        result = return_value(nvals, mets[n])
                    if stdout:
                        if not result:
                            result = "*"
                        outtxt += "{sep}{val}".format(val=result,
                                                      sep=separator)
                    else:
                        try:
                            if incol:
                                mywhe = "{dc}='{da}' AND ".format(da=start,
                                                                  dc=incol)
                                if endcol:
                                    mywhe += "{dc}='{da}' AND ".format(
                                        da=final, dc=endcol)

                                mywhe += "cat={ca}".format(ca=vals[0])

                                pymod.Module(
                                    "v.db.update",
                                    map=output,
                                    column=cols[n],
                                    value=str(result),
                                    where=mywhe,
                                )
                            else:
                                pymod.Module(
                                    "v.db.update",
                                    map=output,
                                    column=cols[n],
                                    value=str(result),
                                    where="cat={ca}".format(ca=vals[0]),
                                )
                        except CalledModuleError:
                            gscript.fatal(_("v.db.update return an error"))
                if stdout:
                    outtxt += "\n"
                if x == len(myfeats):
                    break
                else:
                    x += 1
    if stdout:
        print(outtxt)
Example #19
def main(options, flags):

    # Get the options
    points = options["points"]
    coordinates = options["coordinates"] 
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    order = options["order"]
    layout = options["layout"]
    null_value = options["null_value"]
    separator = options["separator"]
    
    nprocs = int(options["nprocs"])
    write_header = flags["n"]
    use_stdin = flags["i"]

    #output_cat_label = flags["f"]
    #output_color = flags["r"]
    #output_cat = flags["i"]
    
    overwrite = gscript.overwrite()
    
    if coordinates and points: 
        gscript.fatal(_("Options coordinates and points are mutually exclusive"))

    if not coordinates and not points and not use_stdin:
        gscript.fatal(_("Please specify the 'coordinates' or 'points' option, "
                        "or use the 'i' flag to pipe coordinate positions "
                        "to t.rast.what from stdin"))

    if use_stdin:
        coordinates_stdin = str(sys.__stdin__.read())
        # Check if coordinates are given with site names or IDs
        stdin_length = len(coordinates_stdin.split('\n')[0].split())
        if stdin_length <= 2:
            site_input = False
        elif stdin_length >= 3:
            site_input = True
    else:
        site_input = False

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, order=order, 
                                             dbif=dbif)
    dbif.close()

    if not maps:
        gscript.fatal(_("Space time raster dataset <%s> is empty") % sp.get_id())

    # Setup separator
    if separator == "pipe":
        separator = "|"
    if separator == "comma":
        separator = ","
    if separator == "space":
        separator = " "
    if separator == "tab":
        separator = "\t"
    if separator == "newline":
        separator = "\n"

    # The output flags are disabled due to test issues
    flags = ""
    #if output_cat_label is True:
    #    flags += "f"
    #if output_color is True:
    #    flags += "r"
    #if output_cat is True:
    #    flags += "i"

    # Configure the r.what module
    if points: 
        r_what = pymod.Module("r.what", map="dummy", 
                                        output="dummy", run_=False, 
                                        separator=separator, points=points, 
                                        overwrite=overwrite, flags=flags, 
                                        quiet=True) 
    elif coordinates: 
        # Create a list of values
        coord_list = coordinates.split(",")
        r_what = pymod.Module("r.what", map="dummy", 
                                        output="dummy", run_=False, 
                                        separator=separator,  
                                        coordinates=coord_list, 
                                        overwrite=overwrite, flags=flags, 
                                        quiet=True)
    elif use_stdin:
        r_what = pymod.Module("r.what", map="dummy", 
                                        output="dummy", run_=False, 
                                        separator=separator,  
                                        stdin_=coordinates_stdin, 
                                        overwrite=overwrite, flags=flags, 
                                        quiet=True)
    else: 
        grass.error(_("Please specify points or coordinates"))

    if len(maps) < nprocs:
        nprocs = len(maps)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))
    num_maps = len(maps)
    
    # 400 maps is the absolute maximum in r.what
    # We need to determine the number of maps that can be processed
    # in parallel

    # First estimate the number of maps per process. We use 400 maps
    # simultaneously as the maximum for a single process

    num_loops = int(num_maps / (400 * nprocs))
    remaining_maps = num_maps % (400 * nprocs)

    if num_loops == 0:
        num_loops = 1
        remaining_maps = 0

    # Compute the number of maps for each process
    maps_per_loop = int((num_maps - remaining_maps) / num_loops)
    maps_per_process = int(maps_per_loop / nprocs)
    remaining_maps_per_loop = maps_per_loop % nprocs

    # We put the output files in an ordered list
    output_files = []
    output_time_list = []

    count = 0
    for loop in range(num_loops):
        file_name = gscript.tempfile() + "_%i"%(loop)
        count = process_loop(nprocs, maps, file_name, count, maps_per_process, 
                             remaining_maps_per_loop, output_files, 
                             output_time_list, r_what, process_queue)
    
    process_queue.wait()
    
    gscript.verbose("Number of raster map layers remaining for sampling %i"%(remaining_maps))
    if remaining_maps > 0:
        # Use a single process if 100 maps or fewer remain
        if remaining_maps <= 100:
            mod = copy.deepcopy(r_what)
            mod(map=map_names, output=file_name)
            process_queue.put(mod)
        else:
            maps_per_process = int(remaining_maps / nprocs)
            remaining_maps_per_loop = remaining_maps % nprocs
            
            file_name = "out_remain"
            process_loop(nprocs, maps, file_name, count, maps_per_process, 
                         remaining_maps_per_loop, output_files, 
                         output_time_list, r_what, process_queue)

    # Wait for unfinished processes
    process_queue.wait()
    
    # Put the output files together in the correct order
    if layout == "row":
        one_point_per_row_output(separator, output_files, output_time_list,
                                 output, write_header, site_input)
    elif layout == "col":
        one_point_per_col_output(separator, output_files, output_time_list,
                                 output, write_header, site_input)
    else:
        one_point_per_timerow_output(separator, output_files, output_time_list,
                                     output, write_header, site_input)
Example #20
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    use_raster_region = flags["r"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbors module
    neighbor_module = pymod.Module("r.neighbors",
                                   input="dummy",
                                   output="dummy",
                                   run_=False,
                                   finish_=False,
                                   size=int(size),
                                   method=method,
                                   overwrite=overwrite,
                                   quiet=True)

    gregion_module = pymod.Module(
        "g.region",
        raster="dummy",
        run_=False,
        finish_=False,
    )

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # Run r.neighbors on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())

        if use_raster_region is True:
            reg = copy.deepcopy(gregion_module)
            reg(raster=map.get_id())
            print(reg.get_bash())
            print(mod.get_bash())
            mm = pymod.MultiModule([reg, mod],
                                   sync=False,
                                   set_temp_region=True)
            process_queue.put(mm)
        else:
            print(mod.get_bash())
            process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()
    proc_list = process_queue.get_finished_modules()

    # Check return status of all finished modules
    error = 0
    for proc in proc_list:
        if proc.popen.returncode != 0:
            grass.error(
                _("Error running module: %\n    stderr: %s") %
                (proc.get_bash(), proc.outputs.stderr))
            error += 1

    if error > 0:
        grass.fatal(_("Error running modules."))

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
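
The g.region/r.neighbors pair above is wrapped in a MultiModule so both run as one job inside a temporary region. A stripped-down sketch of that pattern with assumed map names:

import grass.pygrass.modules as pymod

queue = pymod.ParallelModuleQueue(nprocs=2)
reg = pymod.Module("g.region", raster="elev_a", run_=False, finish_=False)
nbr = pymod.Module("r.neighbors", input="elev_a", output="elev_a_smooth",
                   size=3, run_=False, finish_=False)
# sync=False queues both modules as one unit; set_temp_region=True runs them
# in their own temporary region so parallel jobs do not interfere
queue.put(pymod.MultiModule([reg, nbr], sync=False, set_temp_region=True))
queue.wait()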
Example #21
def register_map_object_list(type,
                             map_list,
                             output_stds,
                             delete_empty=False,
                             unit=None,
                             dbif=None):
    """Register a list of AbstractMapDataset objects in the temporal database
    and optionally in a space time dataset.

    :param type: The type of the map layer (raster, raster_3d, vector)
    :param map_list: List of AbstractMapDataset objects
    :param output_stds: The output stds
    :param delete_empty: Set True to delete empty map layers found in the map_list
    :param unit: The temporal unit of the space time dataset
    :param dbif: The database interface to be used

    """
    import grass.pygrass.modules as pymod
    import copy

    dbif, connection_state_changed = init_dbif(dbif)

    filename = gscript.tempfile(True)
    file = open(filename, "w")

    empty_maps = []
    for map_layer in map_list:
        # Read the map data
        map_layer.load()
        # In case of an empty map continue; do not register empty maps

        if delete_empty:
            if type in ["raster", "raster_3d", "rast", "rast3d"]:
                if (map_layer.metadata.get_min() is None
                        and map_layer.metadata.get_max() is None):
                    empty_maps.append(map_layer)
                    continue
            if type == "vector":
                if map_layer.metadata.get_number_of_primitives() == 0:
                    empty_maps.append(map_layer)
                    continue

        start, end = map_layer.get_temporal_extent_as_tuple()
        id = map_layer.get_id()
        if not end:
            end = start
        string = "%s|%s|%s\n" % (id, str(start), str(end))
        file.write(string)
    file.close()

    if output_stds:
        output_stds_id = output_stds.get_id()
    else:
        output_stds_id = None

    register_maps_in_space_time_dataset(type,
                                        output_stds_id,
                                        unit=unit,
                                        file=filename,
                                        dbif=dbif)

    g_remove = pymod.Module("g.remove",
                            flags="f",
                            quiet=True,
                            run_=False,
                            finish_=True)

    # Remove empty maps and unregister them from the temporal database
    if len(empty_maps) > 0:
        for map in empty_maps:
            mod = copy.deepcopy(g_remove)
            if map.get_name():
                if map.get_type() == "raster":
                    mod(type="raster", name=map.get_name())
                if map.get_type() == "raster3d":
                    mod(type="raster_3d", name=map.get_name())
                if map.get_type() == "vector":
                    mod(type="vector", name=map.get_name())
                mod.run()
            if map.is_in_db(dbif):
                map.delete(dbif)

    if connection_state_changed:
        dbif.close()
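
A hypothetical usage sketch for register_map_object_list(), assuming the rasters rain_1 and rain_2 already exist in the current mapset and that the function is importable via grass.temporal (map names and dates are made up):

from datetime import datetime
import grass.temporal as tgis

tgis.init()

map_list = []
for name, day in (("rain_1", 1), ("rain_2", 2)):
    map_layer = tgis.RasterDataset(name + "@" + tgis.get_current_mapset())
    map_layer.set_absolute_time(datetime(2024, 1, day),
                                datetime(2024, 1, day + 1))
    map_list.append(map_layer)

# register in the temporal database only (no target space time dataset)
tgis.register_map_object_list("raster", map_list, output_stds=None,
                              delete_empty=True)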
Example #22
def aggregate_by_topology(
    granularity_list,
    granularity,
    map_list,
    topo_list,
    basename,
    time_suffix,
    offset=0,
    method="average",
    nprocs=1,
    spatial=None,
    dbif=None,
    overwrite=False,
    file_limit=1000,
):
    """Aggregate a list of raster input maps with r.series

    :param granularity_list: A list of AbstractMapDataset objects.
                             The temporal extents of the objects are used
                             to build the spatio-temporal topology with the
                             map list objects
    :param granularity: The granularity of the granularity list
    :param map_list: A list of RasterDataset objects that contain the raster
                     maps that should be aggregated
    :param topo_list: A list of strings of topological relations that are
                      used to select the raster maps for aggregation
    :param basename: The basename of the new generated raster maps
    :param time_suffix: Use the granularity truncated start time of the
                        actual granule to create the suffix for the basename
    :param offset: Use a numerical offset for suffix generation
                   (overwritten by time_suffix)
    :param method: The aggregation method of r.series (average,min,max, ...)
    :param nprocs: The number of processes used for parallel computation
    :param spatial: This indicates if the spatial topology is created as
                    well: spatial can be None (no spatial topology), "2D"
                    using west, east, south, north or "3D" using west,
                    east, south, north, bottom, top
    :param dbif: The database interface to be used
    :param overwrite: Overwrite existing raster maps
    :param file_limit: The maximum number of raster map layers that
                       should be opened at once by r.series
    :return: A list of RasterDataset objects that contain the new map names
             and the temporal extent for map registration
    """
    import grass.pygrass.modules as pymod
    import copy

    msgr = get_tgis_message_interface()

    dbif, connection_state_changed = init_dbif(dbif)

    topo_builder = SpatioTemporalTopologyBuilder()
    topo_builder.build(mapsA=granularity_list, mapsB=map_list, spatial=spatial)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    # Dummy process object that will be deep copied
    # and be put into the process queue
    r_series = pymod.Module(
        "r.series",
        output="spam",
        method=[method],
        overwrite=overwrite,
        quiet=True,
        run_=False,
        finish_=False,
    )
    g_copy = pymod.Module(
        "g.copy", raster=["spam", "spamspam"], quiet=True, run_=False, finish_=False
    )
    output_list = []
    count = 0

    for granule in granularity_list:
        msgr.percent(count, len(granularity_list), 1)
        count += 1

        aggregation_list = []

        if "equal" in topo_list and granule.equal:
            for map_layer in granule.equal:
                aggregation_list.append(map_layer.get_name())
        if "contains" in topo_list and granule.contains:
            for map_layer in granule.contains:
                aggregation_list.append(map_layer.get_name())
        if "during" in topo_list and granule.during:
            for map_layer in granule.during:
                aggregation_list.append(map_layer.get_name())
        if "starts" in topo_list and granule.starts:
            for map_layer in granule.starts:
                aggregation_list.append(map_layer.get_name())
        if "started" in topo_list and granule.started:
            for map_layer in granule.started:
                aggregation_list.append(map_layer.get_name())
        if "finishes" in topo_list and granule.finishes:
            for map_layer in granule.finishes:
                aggregation_list.append(map_layer.get_name())
        if "finished" in topo_list and granule.finished:
            for map_layer in granule.finished:
                aggregation_list.append(map_layer.get_name())
        if "overlaps" in topo_list and granule.overlaps:
            for map_layer in granule.overlaps:
                aggregation_list.append(map_layer.get_name())
        if "overlapped" in topo_list and granule.overlapped:
            for map_layer in granule.overlapped:
                aggregation_list.append(map_layer.get_name())

        if aggregation_list:
            msgr.verbose(
                _("Aggregating %(len)i raster maps from %(start)s to" " %(end)s")
                % (
                    {
                        "len": len(aggregation_list),
                        "start": str(granule.temporal_extent.get_start_time()),
                        "end": str(granule.temporal_extent.get_end_time()),
                    }
                )
            )

            if granule.is_time_absolute() is True and time_suffix == "gran":
                suffix = create_suffix_from_datetime(
                    granule.temporal_extent.get_start_time(), granularity
                )
                output_name = "{ba}_{su}".format(ba=basename, su=suffix)
            elif granule.is_time_absolute() is True and time_suffix == "time":
                suffix = create_time_suffix(granule)
                output_name = "{ba}_{su}".format(ba=basename, su=suffix)
            else:
                output_name = create_numeric_suffix(
                    basename, count + int(offset), time_suffix
                )

            map_layer = RasterDataset("%s@%s" % (output_name, get_current_mapset()))
            map_layer.set_temporal_extent(granule.get_temporal_extent())

            if map_layer.map_exists() is True and overwrite is False:
                msgr.fatal(
                    _(
                        "Unable to perform aggregation. Output raster "
                        "map <%(name)s> exists and overwrite flag was "
                        "not set" % ({"name": output_name})
                    )
                )

            output_list.append(map_layer)

            if len(aggregation_list) > 1:
                # Create the r.series input file
                filename = gscript.tempfile(True)
                file = open(filename, "w")
                for name in aggregation_list:
                    string = "%s\n" % (name)
                    file.write(string)
                file.close()

                mod = copy.deepcopy(r_series)
                mod(file=filename, output=output_name)
                if len(aggregation_list) > int(file_limit):
                    msgr.warning(
                        _(
                            "The limit of open files (%i) was "
                            "exceeded (%i maps). The module r.series "
                            "will be run with the z flag to avoid "
                            "exceeding the open file limit."
                        )
                        % (int(file_limit), len(aggregation_list))
                    )
                    mod(flags="z")
                process_queue.put(mod)
            else:
                mod = copy.deepcopy(g_copy)
                mod(raster=[aggregation_list[0], output_name])
                process_queue.put(mod)

    process_queue.wait()

    if connection_state_changed:
        dbif.close()

    msgr.percent(1, 1, 1)

    return output_list
Example #23
def main(options, flags):

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    method = options["type"]
    nprocs = int(options["nprocs"])
    column = options["column"]

    register_null = flags["n"]
    t_flag = flags["t"]
    s_flag = flags["s"]
    v_flag = flags["v"]
    b_flag = flags["b"]
    z_flag = flags["z"]
    
    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif,
                                 overwrite=overwrite)
                                               
    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"
    if s_flag is True:
        flags += "s"
    if v_flag is True:
        flags += "v"
    if b_flag is True:
        flags += "b"
    if z_flag is True:
        flags += "z"
    
    # Configure the r.to.vect module
    to_vector_module = pymod.Module("r.to.vect", input="dummy",
                                   output="dummy", run_=False,
                                   finish_=False, flags=flags,
                                   type=method, overwrite=overwrite,
                                   quiet=True)

    # The module queue for parallel execution, except if attribute tables should
    # be created. Then force single process use
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(_("The number of parellel r.to.vect processes was "\
                               "reduced to 1 because of the table attribute "\
                               "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # Run r.to.vect on all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)
        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(to_vector_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count%10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title,
                                descr, stype, dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count%10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove", flags='f', type='vector', name=names, 
                            quiet=True)

    dbif.close()
Example #24
def main(options, flags):
    import grass.pygrass.modules as pymod
    import grass.temporal as tgis
    from grass.pygrass.vector import VectorTopo

    invect = options["input"]
    if invect.find('@') != -1:
        invect = invect.split('@')[0]
    incol = options["date_column"]
    indate = options["date"]
    strds = options["strds"]
    if strds.find('@') != -1:
        strds_name = strds.split('@')[0]
    else:
        strds_name = strds
    output = options["output"]
    cols = options["columns"].split(',')
    mets = options["method"].split(',')
    gran = options["granularity"]
    dateformat = options["date_format"]
    separator = gscript.separator(options["separator"])

    stdout = False
    if output != '-' and flags['u']:
        gscript.fatal(_("Cannot combine 'output' option and 'u' flag"))
    elif output != '-' and flags['c']:
        gscript.fatal(_("Cannot combine 'output' option and 'c' flag"))
    elif output == '-' and (flags['u'] or flags['c']):
        output = invect
        gscript.warning(_("Attribute table of vector {name} will be updated"
                          "...").format(name=invect))
    else:
        stdout = True
    if flags['c']:
        cols = []
        for m in mets:
            colname = "{st}_{me}".format(st=strds_name, me=m)
            cols.append(colname)
            try:
                pymod.Module("v.db.addcolumn", map=invect, columns="{col} "
                             "double precision".format(col=colname))
            except CalledModuleError:
                gscript.fatal(_("Not possible to create column "
                                "{col}".format(col=colname)))

    if output != '-' and len(cols) != len(mets):
        gscript.fatal(_("'columns' and 'method' options must have the same "
                        "number of elements"))
    tgis.init()
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()
    sp = tgis.open_old_stds(strds, "strds", dbif)

    if sp.get_temporal_type() == 'absolute':
        delta = int(tgis.gran_to_gran(gran, sp.get_granularity(), True))
        if tgis.gran_singular_unit(gran) in ['year', 'month']:
            delta = int(tgis.gran_to_gran(gran, '1 day', True))
            td = timedelta(delta)
        elif tgis.gran_singular_unit(gran) == 'day':
            delta = tgis.gran_to_gran(gran, sp.get_granularity(), True)
            td = timedelta(delta)
        elif tgis.gran_singular_unit(gran) == 'hour':
            td = timedelta(hours=delta)
        elif tgis.gran_singular_unit(gran) == 'minute':
            td = timedelta(minutes=delta)
        elif tgis.gran_singular_unit(gran) == 'second':
            td = timedelta(seconds=delta)
    else:
        if sp.get_granularity() >= int(gran):
            gscript.fatal(_("Input granularity is smaller or equal to the {iv}"
                            " STRDS granularity".format(iv=strds)))
        td = int(gran)
    if incol and indate:
        gscript.fatal(_("Cannot combine 'date_column' and 'date' options"))
    elif not incol and not indate:
        gscript.fatal(_("You have to fill 'date_column' or 'date' option"))
    elif incol:
        try:
            dates = pymod.Module("db.select", flags='c', stdout_=PI,
                                 stderr_=PI, sql="SELECT DISTINCT {dc} from "
                                   "{vmap} order by {dc}".format(vmap=invect,
                                                                 dc=incol))
            mydates = dates.outputs["stdout"].value.splitlines()
        except CalledModuleError:
            gscript.fatal(_("db.select return an error"))
    elif indate:
        mydates = [indate]
        pymap = VectorTopo(invect)
        pymap.open('r')
        if len(pymap.dblinks) == 0:
            try:
                pymap.close()
                pymod.Module("v.db.addtable", map=invect)
            except CalledModuleError:
                dbif.close()
                gscript.fatal(_("Unable to add table <%s> to vector map "
                                "<%s>" % invect))
        if pymap.is_open():
            pymap.close()
        qfeat = pymod.Module("v.category", stdout_=PI, stderr_=PI,
                             input=invect, option='print')
        myfeats = qfeat.outputs["stdout"].value.splitlines()

    if stdout:
        outtxt = ''
    for data in mydates:
        if sp.get_temporal_type() == 'absolute':
            fdata = datetime.strptime(data, dateformat)
        else:
            fdata = int(data)
        if flags['a']:
            sdata = fdata + td
            mwhere = "start_time >= '{inn}' and end_time < " \
                   "'{out}'".format(inn=fdata, out=sdata)
        else:
            sdata = fdata - td
            mwhere = "start_time >= '{inn}' and end_time < " \
                   "'{out}'".format(inn=sdata, out=fdata)
        lines = None
        try:
            r_what = pymod.Module("t.rast.what", points=invect, strds=strds,
                                  layout='timerow', separator=separator,
                                  flags="v", where=mwhere, quiet=True,
                                  stdout_=PI, stderr_=PI)
            lines = r_what.outputs["stdout"].value.splitlines()
        except CalledModuleError:
            pass
        if incol:
            try:
                qfeat = pymod.Module("db.select", flags='c', stdout_=PI,
                                     stderr_=PI, sql="SELECT DISTINCT cat from"
                                     " {vmap} where {dc}='{da}' order by "
                                     "cat".format(vmap=invect, da=data,
                                                  dc=incol))
                myfeats = qfeat.outputs["stdout"].value.splitlines()
            except CalledModuleError:
                gscript.fatal(_("db.select returned an error for date "
                                "{da}".format(da=data)))
        if not lines and stdout:
            for feat in myfeats:
                outtxt += "{di}{sep}{da}".format(di=feat, da=data,
                                                   sep=separator)
                for n in range(len(mets)):
                    outtxt += "{sep}{val}".format(val='*', sep=separator)
                outtxt += "\n"
        if not lines:
            continue
        x = 0
        for line in lines:
            vals = line.split(separator)
            if vals[0] in myfeats:
                try:
                    nvals = np.array(vals[4:]).astype(float)
                except ValueError:
                    if stdout:
                        outtxt += "{di}{sep}{da}".format(di=vals[0],
                                                         da=data,
                                                         sep=separator)
                        for n in range(len(mets)):
                            outtxt += "{sep}{val}".format(val='*',
                                                          sep=separator)
                        outtxt += "\n"
                    continue
                if stdout:
                    outtxt += "{di}{sep}{da}".format(di=vals[0], da=data,
                                                     sep=separator)
                for n in range(len(mets)):
                    result = return_value(nvals, mets[n])
                    if stdout:
                        outtxt += "{sep}{val}".format(val=result,
                                                      sep=separator)
                    else:
                        try:
                            if incol:
                                pymod.Module("v.db.update", map=output,
                                             column=cols[n], value=str(result),
                                             where="{dc}='{da}' AND cat="
                                             "{ca}".format(da=data, ca=vals[0],
                                                           dc=incol))
                            else:
                                pymod.Module("v.db.update", map=output,
                                             column=cols[n], value=str(result),
                                             where="cat={ca}".format(ca=vals[0]))
                        except CalledModuleError:
                            gscript.fatal(_("v.db.update return an error"))
                if stdout:
                    outtxt += "\n"
                if x == len(myfeats):
                    break
                else:
                    x += 1
    if stdout:
        print(outtxt)
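
# The helper return_value() called above is defined elsewhere in the original
# script and is not shown here. A minimal sketch of what such a helper might
# look like follows; the supported method names are an assumption.
import numpy as np


def return_value(vals, method):
    """Aggregate a 1D array of sampled raster values with the named method."""
    funcs = {"average": np.mean, "median": np.median,
             "minimum": np.min, "maximum": np.max, "stddev": np.std}
    return funcs[method](vals)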
Пример #25
0
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    base = options["basename"]
    where = options["where"]
    nprocs = options["nprocs"]
    tsuffix = options["suffix"]

    mapset = grass.encode(grass.gisenv()["MAPSET"])

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds")

    maps = sp.get_registered_maps_as_objects_with_gaps(where, dbif)

    num = len(maps)

    # Configure the r.series.interp module
    gapfill_module = pymod.Module(
        "r.series.interp",
        overwrite=grass.overwrite(),
        quiet=True,
        run_=False,
        finish_=False,
    )

    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    gap_list = []
    overwrite_flags = {}

    # Identify all gaps and create new names
    count = 0
    for _map in maps:
        if _map.get_id() is None:
            count += 1
            if sp.get_temporal_type() == 'absolute' and tsuffix in [
                    'gran', 'time'
            ]:
                _id = "{ba}@{ma}".format(ba=base, ma=mapset)
            else:
                map_name = tgis.create_numeric_suffix(base, num + count,
                                                      tsuffix)
                _id = "{name}@{ma}".format(name=map_name, ma=mapset)
            _map.set_id(_id)

            gap_list.append(_map)

    if len(gap_list) == 0:
        grass.message(_("No gaps found"))
        return

    # Build the temporal topology
    tb = tgis.SpatioTemporalTopologyBuilder()
    tb.build(maps)

    # Do some checks before computation
    for _map in gap_list:
        if not _map.get_precedes() or not _map.get_follows():
            grass.fatal(
                _("Unable to determine successor "
                  "and predecessor of a gap."))

        if len(_map.get_precedes()) > 1:
            grass.warning(
                _("More than one successor of the gap found. "
                  "Using the first found."))

        if len(_map.get_follows()) > 1:
            grass.warning(
                _("More than one predecessor of the gap found. "
                  "Using the first found."))

    # Interpolate the maps using parallel processing
    result_list = []

    for _map in gap_list:
        predecessor = _map.get_follows()[0]
        successor = _map.get_precedes()[0]

        gran = sp.get_granularity()
        tmpval, start = predecessor.get_temporal_extent_as_tuple()
        end, tmpval = successor.get_temporal_extent_as_tuple()

        # Now resample the gap
        map_matrix = tgis.AbstractSpaceTimeDataset.resample_maplist_by_granularity(
            (_map, ), start, end, gran)

        map_names = []
        map_positions = []

        increment = 1.0 / (len(map_matrix) + 1.0)
        position = increment
        count = 0
        for intp_list in map_matrix:
            new_map = intp_list[0]
            count += 1
            if sp.get_temporal_type() == 'absolute' and tsuffix == 'gran':
                suffix = tgis.create_suffix_from_datetime(
                    new_map.temporal_extent.get_start_time(),
                    sp.get_granularity())
                new_id = "{ba}_{su}@{ma}".format(ba=new_map.get_name(),
                                                 su=suffix,
                                                 ma=mapset)
            elif sp.get_temporal_type() == 'absolute' and tsuffix == 'time':
                suffix = tgis.create_time_suffix(new_map)
                new_id = "{ba}_{su}@{ma}".format(ba=new_map.get_name(),
                                                 su=suffix,
                                                 ma=mapset)
            else:
                map_name = tgis.create_numeric_suffix(new_map.get_name(),
                                                      count, tsuffix)
                new_id = "{name}@{ma}".format(name=map_name, ma=mapset)

            new_map.set_id(new_id)

            overwrite_flags[new_id] = False
            if new_map.map_exists() or new_map.is_in_db(dbif):
                if not grass.overwrite():
                    grass.fatal(
                        _("Map with name <%s> already exists. "
                          "Please use another base name." % (_id)))
                else:
                    if new_map.is_in_db(dbif):
                        overwrite_flags[new_id] = True

            map_names.append(new_map.get_name())
            map_positions.append(position)
            position += increment

            result_list.append(new_map)

        mod = copy.deepcopy(gapfill_module)
        mod(input=(predecessor.get_map_id(), successor.get_map_id()),
            datapos=(0, 1),
            output=map_names,
            samplingpos=map_positions)
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Insert new interpolated maps in temporal database and dataset
    for _map in result_list:
        id = _map.get_id()
        if overwrite_flags[id] is True:
            if _map.is_time_absolute():
                start, end = _map.get_absolute_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(id)
                _map.set_absolute_time(start, end)
            else:
                start, end, unit = _map.get_relative_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(id)
                _map.set_relative_time(start, end, unit)
        _map.load()
        _map.insert(dbif)
        sp.register_map(_map, dbif)

    sp.update_from_registered_maps(dbif)
    sp.update_command_string(dbif=dbif)
    dbif.close()
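
# A small illustration (not part of the original module) of how the sampling
# positions fed to r.series.interp above are derived: a gap resampled into n
# maps gets n evenly spaced positions strictly between the predecessor
# (data position 0.0) and the successor (data position 1.0).
def sampling_positions(n_maps):
    """Return evenly spaced interpolation positions in the open interval (0, 1)."""
    increment = 1.0 / (n_maps + 1.0)
    return [increment * (i + 1) for i in range(n_maps)]


print(sampling_positions(3))  # [0.25, 0.5, 0.75]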
Пример #26
0
def main(options, flags):
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    nprocs = int(options["nprocs"])
    step = options["step"]
    levels = options["levels"]
    minlevel = options["minlevel"]
    maxlevel = options["maxlevel"]
    cut = options["cut"]
    time_suffix = options["suffix"]

    register_null = flags["n"]
    t_flag = flags["t"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output,
                                 "stvds",
                                 dbif=dbif,
                                 overwrite=overwrite)

    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"

    # Configure the r.contour module
    contour_module = pymod.Module("r.contour",
                                  input="dummy",
                                  output="dummy",
                                  run_=False,
                                  finish_=False,
                                  flags=flags,
                                  overwrite=overwrite,
                                  quiet=True)

    if step:
        contour_module.inputs.step = float(step)
    if minlevel:
        contour_module.inputs.minlevel = float(minlevel)
    if maxlevel:
        contour_module.inputs.maxlevel = float(maxlevel)
    if levels:
        contour_module.inputs.levels = levels.split(",")
    if cut:
        contour_module.inputs.cut = int(cut)

    # Module queue for parallel execution; fall back to a single process
    # if attribute tables have to be created
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(
                _("The number of parellel r.contour processes was "
                  "reduced to 1 because of the table attribute "
                  "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.contour on all selected maps
    for map in maps:
        count += 1

        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)
        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="vector",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(contour_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title, descr, stype,
                                dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        try:
            if map.load() is not True:
                continue
        except FatalError:
            continue
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove",
                            flags='f',
                            type='vector',
                            name=names,
                            quiet=True)

    dbif.close()
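
# A hedged usage sketch (not part of the original example): parameters of a
# pygrass Module can be supplied either when the object is called or later
# through its "inputs" attribute, as done for r.contour above. The map names
# below are placeholders.
from grass.pygrass.modules import Module

contour = Module("r.contour", input="elevation", output="contours",
                 run_=False, quiet=True)
contour.inputs.step = 10.0  # equivalent to passing step=10.0 at call time
contour.run()               # execute once the module is fully configured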
Пример #27
0
def main():

    # Get the options
    datasets = options["inputs"]
    file = options["file"]
    type = options["type"]
    recursive = flags["r"]
    force = flags["f"]

    if recursive and not force:
        grass.fatal(_("The recursive flag works only in conjunction with the force flag: use -rf"))

    if datasets and file:
        grass.fatal(_("%s= and %s= are mutually exclusive") % ("input", "file"))

    # Make sure the temporal database exists
    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    dataset_list = []

    # Dataset names as comma separated string
    if datasets:
        if datasets.find(",") == -1:
            dataset_list = (datasets,)
        else:
            dataset_list = tuple(datasets.split(","))

    # Read the dataset list from file
    if file:
        with open(file, "r") as fd:
            for line in fd:
                dataset_name = line.strip()
                if dataset_name:
                    dataset_list.append(dataset_name)

    statement = ""

    # Create the pygrass Module object for g.remove
    remove = pyg.Module("g.remove", quiet=True, flags='f', run_=False)

    for name in dataset_list:
        name = name.strip()
        sp = tgis.open_old_stds(name, type, dbif)

        if recursive and force:
            grass.message(_("Removing registered maps and %s" % type))
            maps = sp.get_registered_maps_as_objects(dbif=dbif)
            map_statement = ""
            count = 1
            name_list = []
            for map in maps:
                map.select(dbif)
                # A single map may have multiple layers, hence we must avoid
                # deleting the same map more than once, while the database
                # entries of every layer still have to be removed
                if map.get_name() not in name_list:
                    name_list.append(str(map.get_name()))
                map_statement += map.delete(dbif=dbif, execute=False)

                count += 1
                # Delete every 100 maps
                if count % 100 == 0:
                    dbif.execute_transaction(map_statement)
                    if type == "strds":
                        remove(type="raster", name=name_list, run_=True)
                    if type == "stvds":
                        remove(type="vector", name=name_list, run_=True)
                    if type == "str3ds":
                        remove(type="raster_3d", name=name_list, run_=True)
                    map_statement = ""
                    name_list = []

            if map_statement:
                dbif.execute_transaction(map_statement)
            if name_list:
                if type == "strds":
                    remove(type="raster", name=name_list, run_=True)
                if type == "stvds":
                    remove(type="vector", name=name_list, run_=True)
                if type == "str3ds":
                    remove(type="raster_3d", name=name_list, run_=True)
        else:
            grass.message(_("Note: registered maps themselves have not been removed, only the %s" % type))

        statement += sp.delete(dbif=dbif, execute=False)

    # Execute the collected SQL statements
    dbif.execute_transaction(statement)

    dbif.close()
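
# A hedged sketch of the chunked removal pattern used above, factored into a
# small helper: the g.remove template is reused for every batch so a single
# call never receives an overly long name list. The names passed in are
# assumed to be plain map names.
from grass.pygrass.modules import Module


def remove_in_chunks(names, element="raster", chunk_size=100):
    """Call g.remove -f on the given map names, chunk_size names at a time."""
    remover = Module("g.remove", quiet=True, flags="f", run_=False)
    for i in range(0, len(names), chunk_size):
        remover(type=element, name=names[i:i + chunk_size], run_=True)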