def test_common_granularity_1(self):
        """Testing the common granularity function. """
        ta = tgis.TemporalAlgebraParser(run=True, debug=True)
        expr = 'R = A : B'
        ret = ta.setup_common_granularity(expression=expr)

        self.assertEqual(ret, True)
        self.assertEqual(ta.granularity, "1 month")
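        # setup_common_granularity() analyses the expression and determines the
        # common granularity of the involved STDS; parse() below then works at
        # that granularity.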

        ta.count = 0
        ta.stdstype = "strds"
        ta.maptype = "raster"
        ta.mapclass = tgis.RasterDataset
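        # check_stds() uses these settings to expand the STDS names "A" and "B"
        # into lists of raster maps.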

        maplist = ta.check_stds("A")
        self.assertEqual(len(maplist), 6)
        maplist = ta.check_stds("B")
        self.assertEqual(len(maplist), 6)
        
        ta.parse(expression=expr, basename="r", overwrite=True)

        D = tgis.open_old_stds("R", type="strds")

        self.assertEqual(D.metadata.get_number_of_maps(), 6)
        self.assertEqual(D.metadata.get_min_min(), 1) 
        self.assertEqual(D.metadata.get_max_max(), 6) 
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 7, 1))
        self.assertEqual( D.check_temporal_topology(),  True)
        self.assertEqual(D.get_granularity(),  u'1 month')
Example #2
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file
        filename = grass.tempfile(True)
        file = open(filename, 'w')

        for row in rows:
            string = "%s\n" % (row["id"])
            file.write(string)

        file.close()

        flag = ""
        if len(rows) > 1000:
            grass.warning(_("Processing over 1000 maps: activating -z flag of r.series which slows down processing"))
            flag += "z"
        if nulls:
            flag += "n"

        try:
            grass.run_command("r.series", flags=flag, file=filename,
                              output=output, overwrite=grass.overwrite(),
                              method=method)
        except CalledModuleError:
            grass.fatal(_("%s failed. Check above error messages.") % 'r.series')

        if not add_time:
            # Create the time range for the output map
            if output.find("@") >= 0:
                id = output
            else:
                mapset = grass.gisenv()["MAPSET"]
                id = output + "@" + mapset

            map = sp.get_new_map_instance(id)
            map.load()
            map.set_temporal_extent(sp.get_temporal_extent())
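            # The aggregated output map inherits the temporal extent of the
            # whole input space time raster dataset.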

            # Register the map in the temporal database
            if map.is_in_db():
                map.update_all()
            else:
                map.insert()
Example #3
    def test_temporal_intersection_1(self):
        """Simple temporal intersection test"""
        self.assertModule("t.rast.algebra", expression="R = A {+,equal,i} B", basename="r")
        D = tgis.open_old_stds("R", type="strds")

        self.assertEqual(D.metadata.get_number_of_maps(), 0)

    def test_simple_arith_2(self):
        """Simple arithmetic test that creates an empty strds"""
        tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True)
        tra.parse(expression="R = A {*, during} A {+, during} A", basename="r", overwrite=True)
        D = tgis.open_old_stds("R", type="strds")
        D.select()
        self.assertEqual(D.metadata.get_number_of_maps(), 0)

    def test_temporal_extent1(self):
        """Testing the temporal extent operators. """
        ta = tgis.TemporalRasterAlgebraParser(run=True, debug=True)
        ta.parse(expression="R = A {:,during,r} C",   basename="r", overwrite=True)

        D = tgis.open_old_stds("R", type="strds")
        D.select()
        maplist = D.get_registered_maps_as_objects()
        self.assertEqual(D.metadata.get_number_of_maps(), 2)
        self.assertEqual(D.metadata.get_min_min(), 2)
        self.assertEqual(D.metadata.get_max_max(), 3)
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 2))
        self.assertEqual(end, datetime.datetime(2001, 1, 4))
        self.assertEqual( D.check_temporal_topology(),  False)
        self.assertEqual(D.get_granularity(),  u'2 days')

        ta = tgis.TemporalRasterAlgebraParser(run=True, debug=True, dry_run=True)
        pc = ta.parse(expression="R = A {:,during,r} C", basename="r", overwrite=True)

        self.assertEqual(len(pc["register"]), 2)
        self.assertEqual(len(pc["processes"]), 2)
        self.assertEqual(pc["processes"][0]["name"], "r.mapcalc")
        self.assertEqual(pc["processes"][1]["name"], "r.mapcalc")
        self.assertEqual(pc["STDS"]["name"], "R")
        self.assertEqual(pc["STDS"]["stdstype"], "strds")
    def test_temporal_intersection_1(self):
        """Simple temporal intersection test"""
        tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True)
        tra.parse(expression="R = A {+,equal,i} B", basename="r", overwrite=True)
        D = tgis.open_old_stds("R", type="strds")
        D.select()
        self.assertEqual(D.metadata.get_number_of_maps(), 0)
Example #7
def main():

    # Get the options
    input = options["input"]
    where = options["where"]
    columns = options["columns"]
    tempwhere = options["t_where"]
    layer = options["layer"]
    separator = grass.separator(options["separator"])

    if where == "" or where == " " or where == "\n":
        where = None

    if columns == "" or columns == " " or columns == "\n":
        columns = None

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "stvds")

    rows = sp.get_registered_maps("name,layer,mapset,start_time,end_time",
                                  tempwhere, "start_time", None)

    col_names = ""
    if rows:
        for row in rows:
            vector_name = "%s@%s" % (row["name"], row["mapset"])
            # In case a layer is defined in the vector dataset,
            # we override the option layer
            if row["layer"]:
                layer = row["layer"]

            select = grass.read_command("v.db.select", map=vector_name,
                                        layer=layer, columns=columns,
                                        separator="%s" % (separator), where=where)

            if not select:
                grass.fatal(_("Unable to run v.db.select for vector map <%s> "
                              "with layer %s") % (vector_name, layer))
            # The first line contains the column names
            list = select.split("\n")
            count = 0
            for entry in list:
                if entry.strip() != "":
                    # print the column names in case they change
                    if count == 0:
                        col_names_new = "start_time%send_time%s%s" % (
                            separator, separator, entry)
                        if col_names != col_names_new:
                            col_names = col_names_new
                            print col_names
                    else:
                        if row["end_time"]:
                            print "%s%s%s%s%s" % (row["start_time"], separator,
                                                  row["end_time"], separator, entry)
                        else:
                            print "%s%s%s%s" % (row["start_time"],
                                                separator, separator, entry)
                    count += 1
    def test_1(self):
        """Simple arithmetik test"""
        tra = tgis.TemporalRasterAlgebraParser(run = True, debug = True)
        expr = "R = if(C == 9,  A - 1)"
        ret = tra.setup_common_granularity(expression=expr,  lexer = tgis.TemporalRasterAlgebraLexer())
        self.assertEqual(ret, True)
        
        tra.parse(expression=expr, basename="r", overwrite=True)

        D = tgis.open_old_stds("R", type="strds")

        self.assertEqual(D.metadata.get_number_of_maps(), 6)
        self.assertEqual(D.metadata.get_min_min(), 0) # 1 - 1
        self.assertEqual(D.metadata.get_max_max(), 5) # 6 - 1
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 7, 1))
        self.assertEqual( D.check_temporal_topology(),  True)
        self.assertEqual(D.get_granularity(),  u'1 month')

        tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True, dry_run=True)
        tra.setup_common_granularity(expression=expr,  lexer = tgis.TemporalRasterAlgebraLexer())
        pc = tra.parse(expression=expr, basename="r", overwrite=True)

        self.assertEqual(len(pc["register"]), 6)
        self.assertEqual(len(pc["processes"]), 6)
        self.assertEqual(pc["processes"][0]["name"], "r.mapcalc")
        self.assertEqual(pc["processes"][5]["name"], "r.mapcalc")
        self.assertEqual(pc["STDS"]["name"], "R")
        self.assertEqual(pc["STDS"]["stdstype"], "strds")
    def test_3(self):
        """Simple arithmetik test with null map"""
        tra = tgis.TemporalRasterAlgebraParser(run = True, debug = True)
        expr = "R = A + B + C + tmap(nullmap)"
        ret = tra.setup_common_granularity(expression=expr,
                                           lexer=tgis.TemporalRasterAlgebraLexer())
        self.assertEqual(ret, True)

        tra.parse(expression=expr, basename="r", overwrite=True)

        D = tgis.open_old_stds("R", type="strds")

        self.assertEqual(D.metadata.get_number_of_maps(), 0)
        self.assertEqual(D.metadata.get_min_min(), None)
        self.assertEqual(D.metadata.get_max_max(), None)
        start, end = D.get_absolute_time()
        self.assertEqual(start, None)
        self.assertEqual(end, None)
        self.assertEqual( D.check_temporal_topology(),  False)
        self.assertEqual(D.get_granularity(),  None)

        tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True, dry_run=True)
        tra.setup_common_granularity(expression=expr,  lexer = tgis.TemporalRasterAlgebraLexer())
        pc = tra.parse(expression=expr, basename="r", overwrite=True)

        print(pc)

        self.assertEqual(len(pc["register"]), 0)
        self.assertEqual(len(pc["processes"]), 0)
        self.assertEqual(pc["STDS"]["name"], "R")
        self.assertEqual(pc["STDS"]["stdstype"], "strds")
Example #10
    def test_simple_arith_2(self):
        """Simple arithmetic test that creates an empty strds"""
        self.assertModule("t.rast.algebra", expression="R = A {*, during} A {+, during} A", basename="r")
        D = tgis.open_old_stds("R", type="strds")

        self.assertEqual(D.metadata.get_number_of_maps(), 0)
Example #11
    def populate_user_flow_stds(self, rast_quser_name, overwrite):
        '''rast_quser_name: name of user flow raster map
        '''
        arr_qfix = garray.array(dtype=np.float32)
        arr_qvar = garray.array(dtype=np.float32)
        arr_quser = garray.array(dtype=np.float32)

        map_list = []
        var_map_list = []
        # iterate in boundary conditions
        for bc_key, bc_value in self.content.iteritems():
            start_coord = bc_value['start_coor']
            end_coord = bc_value['end_coor']

            if bc_value['type'] == 'QFIX':
                value = bc_value['value'][0][0]
                if not arr_qfix:
                    arr_qfix = populate_array(
                                arr_qfix, start_coord, end_coord, value)
                # Add all qfix together to make only one map
                else:
                    arr_qfix += populate_array(arr_qfix, start_coord,
                                                    end_coord, value)

            elif bc_value['type'] == 'QVAR':
                for bc_var_value in bc_value['values']:
                    arr_qvar = populate_array(arr_qvar,
                        start_coord, end_coord, bc_var_value[0])
                    var_map_list.append((arr_qvar,
                                    bc_var_value[1],
                                    bc_value['time_unit']))

        for var_map in var_map_list:
            # include all QFIX and QVAR in one map
            arr_quser[:] = var_map[0] + arr_qfix
            # write GRASS map
            rast_name_var = '{}_{}'.format(
                rast_quser_name, str(int(var_map[1])))
            rast_id_var = tgis.AbstractMapDataset.build_id(
                            rast_name_var, self.mapset)
            arr_quser.write(mapname=rast_id_var, overwrite=overwrite)
            # add temporal information
            rast_var = tgis.RasterDataset(rast_id_var)
            rast_var.set_relative_time(start_time=var_map[1],
                        end_time=None, unit=var_map[2])
            map_list.append(rast_var)

        # Register maps in the space-time dataset
        if map_list:
            stds = tgis.open_old_stds(rast_quser_name, 'strds', dbif=self.dbif)
            tgis.register.register_map_object_list('raster',
                                map_list, output_stds=stds,
                                     delete_empty=True, unit=var_map[2],
                                     dbif=self.dbif)
        return self
Example #12
File: test_snap.py Project: rkrug/grass-ci
    def test_1_metadata(self):
        """Set title, description and aggregation"""

        A = tgis.open_old_stds("A", type="strds")
        A.select()
        self.assertEqual(A.get_map_time(), "point")

        self.assertModule("t.snap", input="A", type="strds")

        A.select()
        self.assertEqual(A.get_map_time(), "interval")
Example #13
    def test_3_update(self):
        """Set title, description and aggregation"""
        
        self.runModule("g.remove", type="vector", name="a4", flags="f")
        
        self.assertModule("t.support", type="stvds", input="A", flags="m")

        A = tgis.open_old_stds("A", type="stvds")
        A.select()
        self.assertEqual(A.metadata.get_number_of_points(), 30) 
        self.assertEqual(A.metadata.get_number_of_maps(), 3)      
Example #14
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file
        filename = grass.tempfile(True)
        file = open(filename, 'w')

        for row in rows:
            string = "%s\n" % (row["id"])
            file.write(string)

        file.close()

        flag = "z"
        if nulls:
            flag += "n"

        ret = grass.run_command("r.series", flags=flag, file=filename,
                                output=output, overwrite=grass.overwrite(),
                                method=method)

        if ret == 0 and not add_time:
            # Create the time range for the output map
            if output.find("@") >= 0:
                id = output
            else:
                mapset = grass.gisenv()["MAPSET"]
                id = output + "@" + mapset

            map = sp.get_new_map_instance(id)
            map.load()
            map.set_temporal_extent(sp.get_temporal_extent())

            # Register the map in the temporal database
            if map.is_in_db():
                map.update_all()
            else:
                map.insert()
Example #15
    def test_simple_arith_hash_1(self):
        """Simple arithmetic test including the hash operator"""
       
        self.assertModule("t.rast.algebra",  expression='R = A + (A {#, equal,l} A)', basename="r")

        D = tgis.open_old_stds("R", type="strds")
        
        self.assertEqual(D.metadata.get_number_of_maps(), 4)
        self.assertEqual(D.metadata.get_min_min(), 2)
        self.assertEqual(D.metadata.get_max_max(), 5)
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 5))
Example #16
    def test_temporal_intersection_7(self):
        """Simple temporal intersection test"""
       
        self.assertModule("t.rast.algebra",  expression="R = B {+,overlapped,u} C", basename="r")

        D = tgis.open_old_stds("R", type="strds")
        
        self.assertEqual(D.metadata.get_number_of_maps(), 1)
        self.assertEqual(D.metadata.get_min_min(), 13) # 6 + 7
        self.assertEqual(D.metadata.get_max_max(), 13) # 6 + 7
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 2))
        self.assertEqual(end, datetime.datetime(2001, 1, 5))
Example #17
    def test_temporal_intersection_5(self):
        """Simple temporal intersection test"""
       
        self.assertModule("t.rast.algebra",  expression="R = A {+,starts|finishes,i} B", basename="r")

        D = tgis.open_old_stds("R", type="strds")
        
        self.assertEqual(D.metadata.get_number_of_maps(), 4)
        self.assertEqual(D.metadata.get_min_min(), 6)  # 1 + 5
        self.assertEqual(D.metadata.get_max_max(), 10) # 4 + 6
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 5))
Example #18
    def test_simple_arith_td_4(self):
        """Simple arithmetic test"""
       
        self.assertModule("t.rast.algebra",  expression='R = A {/, equal} td(A)', basename="r")

        D = tgis.open_old_stds("R", type="strds")
        
        self.assertEqual(D.metadata.get_number_of_maps(), 4)
        self.assertEqual(D.metadata.get_min_min(), 1)
        self.assertEqual(D.metadata.get_max_max(), 4)
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 5))
    def test_simple_arith_if_1(self):
        """Simple arithmetic test with if condition"""
        expr = 'R = if(start_date(A) >= "2001-02-01", A + A)'
        self.assertModule("t.rast.algebra",  expression=expr, flags="g", basename="r")

        D = tgis.open_old_stds("R", type="strds")

        self.assertEqual(D.metadata.get_number_of_maps(), 5)
        self.assertEqual(D.metadata.get_min_min(), 4)
        self.assertEqual(D.metadata.get_max_max(), 12)
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 2, 1))
        self.assertEqual(end, datetime.datetime(2001, 7, 1))
Example #20
    def test_simple_arith_if_2(self):
        """Simple arithmetic test with if condition"""
       
        self.assertModule("t.rast.algebra",  expression='R = if({equal}, A#A == 1, A - A)', basename="r")

        D = tgis.open_old_stds("R", type="strds")
        
        self.assertEqual(D.metadata.get_number_of_maps(), 4)
        self.assertEqual(D.metadata.get_min_min(), 0)
        self.assertEqual(D.metadata.get_max_max(), 0)
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 5))
Example #21
File: utils.py Project: caomw/grass
def getRegisteredMaps(timeseries, etype):
    """Returns list of maps registered in dataset.
    Can throw ScriptError if the dataset doesn't exist.
    """
    timeseriesMaps = []
    sp = tgis.open_old_stds(timeseries, etype)

    rows = sp.get_registered_maps(columns="id", where=None, order="start_time")
    if rows:
        for row in rows:
            timeseriesMaps.append(row["id"])
    return timeseriesMaps
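
# A hypothetical call (the dataset name is a placeholder, not from the source):
#   map_ids = getRegisteredMaps("temperature_daily", "strds")
#   for map_id in map_ids:
#       print(map_id)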
    def test_temporal_intersection_7(self):
        """Simple temporal intersection test"""
        tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True)
        tra.parse(expression="R = B {+,overlapped,u} C", basename="r", overwrite=True)

        D = tgis.open_old_stds("R", type="strds")
        D.select()
        self.assertEqual(D.metadata.get_number_of_maps(), 1)
        self.assertEqual(D.metadata.get_min_min(), 13) # 6 + 7
        self.assertEqual(D.metadata.get_max_max(), 13) # 6 + 7
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 2))
        self.assertEqual(end, datetime.datetime(2001, 1, 5))
    def test_complex_arith_if_1(self):
        """Complex arithmetic test with if condition"""
        expr = 'R = if(start_date(A) < "2001-03-01" && A#A == 1, A+C, A-C)'
        self.assertModule("t.rast.algebra",  expression=expr, flags="g", basename="r")

        D = tgis.open_old_stds("R", type="strds")

        self.assertEqual(D.metadata.get_number_of_maps(), 6)
        self.assertEqual(D.metadata.get_min_min(), -6)  # 3 - 9
        self.assertEqual(D.metadata.get_max_max(), 11) # 2 + 9
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 7, 1))
    def test_temporal_neighbors(self):
        """Simple temporal neighborhood computation test"""
        expr ='R = (A[0,0,-1] : D) + (A[0,0,1] : D)'
        self.assertModule("t.rast.algebra",  expression=expr, flags="g", basename="r")

        D = tgis.open_old_stds("R", type="strds")

        self.assertEqual(D.metadata.get_number_of_maps(), 14)
        self.assertEqual(D.metadata.get_min_min(), 2)  # 1 + 1
        self.assertEqual(D.metadata.get_max_max(), 10) # 5 + 5
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 2))
        self.assertEqual(end, datetime.datetime(2001, 5, 6))
    def test_temporal_intersection_5(self):
        """Simple temporal intersection test"""
        tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True)
        tra.parse(expression="R = A {+,starts|finishes,i} B", basename="r", overwrite=True)

        D = tgis.open_old_stds("R", type="strds")
        D.select()
        self.assertEqual(D.metadata.get_number_of_maps(), 4)
        self.assertEqual(D.metadata.get_min_min(), 6)  # 1 + 5
        self.assertEqual(D.metadata.get_max_max(), 10) # 4 + 6
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 5))
    def test_map(self):
        """Test STDS + single map without timestamp"""
        expr = "R = A + map(singletmap)"
        self.assertModule("t.rast.algebra",  expression=expr, flags="g", basename="r")

        D = tgis.open_old_stds("R", type="strds")

        self.assertEqual(D.metadata.get_number_of_maps(), 6)
        self.assertEqual(D.metadata.get_min_min(), 100)  # 1 + 99
        self.assertEqual(D.metadata.get_max_max(), 105) # 6 + 99
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 7, 1))
    def test_simple_arith_hash_1(self):
        """Simple arithmetic test including the hash operator"""
        tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True)
        tra.parse(expression='R = A + (A {#, equal,l} A)', basename="r", overwrite=True)

        D = tgis.open_old_stds("R", type="strds")
        D.select()
        self.assertEqual(D.metadata.get_number_of_maps(), 4)
        self.assertEqual(D.metadata.get_min_min(), 2)
        self.assertEqual(D.metadata.get_max_max(), 5)
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 5))
    def test_simple_arith_if_2(self):
        """Simple arithmetic test with if condition"""
        tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True)
        tra.parse(expression='R = if({equal}, A#A == 1, A - A)', basename="r", overwrite=True)

        D = tgis.open_old_stds("R", type="strds")
        D.select()
        self.assertEqual(D.metadata.get_number_of_maps(), 4)
        self.assertEqual(D.metadata.get_min_min(), 0)
        self.assertEqual(D.metadata.get_max_max(), 0)
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 5))
    def test_simple_arith_3(self):
        """Simple arithmetic test"""
        tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True)
        tra.parse(expression="R = A / A + A*A/A", basename="r", overwrite=True)

        D = tgis.open_old_stds("R", type="strds")
        D.select()
        self.assertEqual(D.metadata.get_number_of_maps(), 4)
        self.assertEqual(D.metadata.get_min_min(), 2) # 1/1 + 1*1/1
        self.assertEqual(D.metadata.get_max_max(), 5) # 4/4 + 4*4/4
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 5))
Example #30
    def test_simple_arith_3(self):
        """Simple arithmetic test"""
       
        self.assertModule("t.rast.algebra",  expression="R = A / A + A*A/A", basename="r")

        D = tgis.open_old_stds("R", type="strds")
        
        self.assertEqual(D.metadata.get_number_of_maps(), 4)
        self.assertEqual(D.metadata.get_min_min(), 2) # 1/1 + 1*1/1
        self.assertEqual(D.metadata.get_max_max(), 5) # 4/4 + 4*4/4
        start, end = D.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 5))
Example #31
tgis.init()
dbif = tgis.SQLDatabaseInterfaceConnection()
dbif.connect()

# create the strds that we need to fill
SPI_RF = 'spi_rf'
dataset = tgis.open_new_stds(name=SPI_RF,
                             type='strds',
                             temporaltype='absolute',
                             title="SPI RF",
                             descr="SPI predicho por RF",
                             semantic='mean',
                             overwrite=True)

dataset_name_rf = 'spi_rf@PERMANENT'
dataset = tgis.open_old_stds(dataset_name_rf, "strds", dbif=dbif)
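# open_new_stds() above creates (or overwrites) the dataset; open_old_stds()
# then re-opens it through the existing database connection dbif.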

SPI_XG = 'spi_xg'
dataset = tgis.open_new_stds(name=SPI_XG,
                             type='strds',
                             temporaltype='absolute',
                             title="SPI XG",
                             descr="SPI predicho por XG",
                             semantic='mean',
                             overwrite=True)

dataset_name_xg = 'spi_xg@PERMANENT'
dataset = tgis.open_old_stds(dataset_name_xg, "strds", dbif=dbif)

# open the old strds for the computation
Example #32
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    name = options["input"]
    type = options["type"]
    title = options["title"]
    aggr_type = options["aggr_type"]
    description = options["description"]
    semantic = options["semantictype"]
    update = flags["u"]
    map_update = flags["m"]

    # Make sure the temporal database exists
    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds = tgis.open_old_stds(name, type, dbif)

    update = False
    if aggr_type and type == "stvds":
        return ()

    if aggr_type and type != "stvds":
        stds.metadata.set_aggregation_type(aggregation_type=aggr_type)
        update = True
    if title:
        stds.metadata.set_title(title=title)
        update = True
        # Update only non-null entries
    if description:
        stds.metadata.set_description(description=description)
        update = True
    if semantic:
        stds.base.set_semantic_type(semantic_type=semantic)
        update = True

    if update:
        stds.update(dbif=dbif)

    if map_update:
        # Update the registered maps from the grass spatial database
        statement = ""
        # This dict stores the datasets that must be updated
        dataset_dict = {}

        count = 0
        maps = stds.get_registered_maps_as_objects(dbif=dbif)

        # We collect the delete and update statements
        for map in maps:

            count += 1
            if count % 10 == 0:
                grass.percent(count, len(maps), 1)

            map.select(dbif=dbif)

            # Check if the map is present in the grass spatial database
            # Update if present, delete if not present
            if map.map_exists():
                # Read new metadata from the spatial database
                map.load()
                statement += map.update(dbif=dbif, execute=False)
            else:
                # Delete the map from the temporal database
                # We need to update all affected space time datasets
                datasets = map.get_registered_stds(dbif)
                if datasets:
                    for dataset in datasets:
                        dataset_dict[dataset] = dataset
                # Collect the delete statements
                statement += map.delete(dbif=dbif, update=False, execute=False)

        # Execute the collected SQL statements
        dbif.execute_transaction(statement)

        # Update the affected space time datasets
        for id in dataset_dict:
            stds_new = stds.get_new_instance(id)
            stds_new.select(dbif=dbif)
            stds_new.update_from_registered_maps(dbif=dbif)

    if map_update or update:
        stds.update_from_registered_maps(dbif=dbif)

    stds.update_command_string(dbif=dbif)

    dbif.close()
Example #33
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    inputs = options["inputs"]
    output = options["output"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    #Get the current mapset to create the id of the space time dataset
    mapset = grass.gisenv()["MAPSET"]

    inputs_split = inputs.split(",")
    input_ids = []

    for input in inputs_split:
        if input.find("@") >= 0:
            input_ids.append(input)
        else:
            input_ids.append(input + "@" + mapset)

    # Set the output name correctly
    if output.find("@") >= 0:
        out_mapset = output.split("@")[1]
        if out_mapset != mapset:
            grass.fatal(
                _("Output space time dataset <%s> must be located in this mapset"
                  ) % (output))
        output_id = output
    else:
        output_id = output + "@" + mapset

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds_list = []
    first = None

    for id in input_ids:
        stds = tgis.open_old_stds(id, type, dbif)
        if first is None:
            first = stds

        if first.get_temporal_type() != stds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Space time datasets to merge must have the same temporal type"
                  ))

        stds_list.append(stds)

    # Do nothing if nothing to merge
    if first is None:
        dbif.close()
        return

    # Check if the new id is in the database
    output_stds = tgis.dataset_factory(type, output_id)
    output_exists = output_stds.is_in_db(dbif=dbif)

    if output_exists == True and grass.overwrite() == False:
        dbif.close()
        grass.fatal(_("Unable to merge maps into space time %s dataset <%s> "\
                      "please use the overwrite flag.") % \
                      (stds.get_new_map_instance(None).get_type(), output_id))

    if not output_exists:
        output_stds = tgis.open_new_stds(output,
                                         type,
                                         first.get_temporal_type(),
                                         "Merged space time dataset",
                                         "Merged space time dataset",
                                         "mean",
                                         dbif=dbif,
                                         overwrite=False)
    else:
        output_stds.select(dbif=dbif)

    registered_output_maps = {}
    # Maps that are already registered in an existing dataset
    # are not registered again
    if output_exists == True:
        rows = output_stds.get_registered_maps(columns="id", dbif=dbif)
        if rows:
            for row in rows:
                registered_output_maps[row["id"]] = row["id"]

    for stds in stds_list:
        # Avoid merging of already registered maps
        if stds.get_id() != output_stds.get_id():
            maps = stds.get_registered_maps_as_objects(dbif=dbif)

            if maps:
                for map in maps:
                    # Jump over already registered maps
                    if map.get_id() in registered_output_maps:
                        continue

                    map.select(dbif=dbif)
                    output_stds.register_map(map=map, dbif=dbif)
                    # Update the registered map list
                    registered_output_maps[map.get_id()] = map.get_id()

    output_stds.update_from_registered_maps(dbif=dbif)

    if output_exists == True:
        output_stds.update_command_string(dbif=dbif)
Example #34
def main():

    # Get the options
    file = options["file"]
    input = options["input"]
    maps = options["maps"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    if maps and file:
        grass.fatal(_(
            "%s= and %s= are mutually exclusive") % ("maps", "file"))

    if not maps and not file:
        grass.fatal(_("%s= or %s= must be specified") % ("maps", "file"))

    mapset = grass.gisenv()["MAPSET"]

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    # In case a space time dataset is specified
    if input:
        sp = tgis.open_old_stds(input, type, dbif)

    maplist = []

    dummy = tgis.RasterDataset(None)

    # Map names as comma separated string
    if maps is not None and maps != "":
        if maps.find(",") == -1:
            maplist = [maps, ]
        else:
            maplist = maps.split(",")

        # Build the maplist
        for count in range(len(maplist)):
            mapname = maplist[count]
            mapid = dummy.build_id(mapname, mapset)
            maplist[count] = mapid

    # Read the map list from file
    if file:
        fd = open(file, "r")

        line = True
        while True:
            line = fd.readline()
            if not line:
                break

            mapname = line.strip()
            mapid = dummy.build_id(mapname, mapset)
            maplist.append(mapid)

    num_maps = len(maplist)
    update_dict = {}
    count = 0

    statement = ""

    # Unregister already registered maps
    grass.message(_("Unregister maps"))
    for mapid in maplist:
        if count%10 == 0:
            grass.percent(count, num_maps, 1)

        map = tgis.dataset_factory(type, mapid)

        # Unregister map if in database
        if map.is_in_db(dbif) == True:
            # Unregister from a single dataset
            if input:
                # Collect SQL statements
                statement += sp.unregister_map(
                    map=map, dbif=dbif, execute=False)

            # Unregister from temporal database
            else:
                # We need to update all datasets after the removal of maps
                map.metadata.select(dbif)
                datasets = map.get_registered_stds(dbif)
                # Store all unique dataset ids in a dictionary
                if datasets:
                    for dataset in datasets:
                        update_dict[dataset] = dataset
                # Collect SQL statements
                statement += map.delete(dbif=dbif, update=False, execute=False)
        else:
            grass.warning(_("Unable to find %s map <%s> in temporal database" %
                            (map.get_type(), map.get_id())))

        count += 1

    # Execute the collected SQL statements
    if statement:
        dbif.execute_transaction(statement)

    grass.percent(num_maps, num_maps, 1)

    # Update space time datasets
    if input:
        grass.message(_("Unregister maps from space time dataset <%s>"%(input)))
    else:
        grass.message(_("Unregister maps from the temporal database"))

    if input:
        sp.update_from_registered_maps(dbif)
        sp.update_command_string(dbif=dbif)
    elif len(update_dict) > 0:
        count = 0
        for key in update_dict.keys():
            id = update_dict[key]
            sp = tgis.open_old_stds(id, type, dbif)
            sp.update_from_registered_maps(dbif)
            grass.percent(count, len(update_dict), 1)
            count += 1

    dbif.close()
Example #35
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    color = options["color"]
    raster = options["raster"]
    volume = options["raster_3d"]
    rules = options["rules"]
    remove = flags["r"]
    write = flags["w"]
    list = flags["l"]
    invert = flags["n"]
    log = flags["g"]
    abslog = flags["a"]
    equi = flags["e"]

    if raster == "":
        raster = None

    if volume == "":
        volume = None

    if rules == "":
        rules = None

    if color == "":
        color = None

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", None, None, None)

    if rows:
        # Create the r.colors input file
        filename = grass.tempfile(True)
        file = open(filename, 'w')

        for row in rows:
            string = "%s\n" % (row["id"])
            file.write(string)

        file.close()

        flags_ = ""
        if remove:
            flags_ += "r"
        if write:
            flags_ += "w"
        if list:
            flags_ += "l"
        if invert:
            flags_ += "n"
        if log:
            flags_ += "g"
        if abslog:
            flags_ += "a"
        if equi:
            flags_ += "e"

        try:
            grass.run_command("r.colors",
                              flags=flags_,
                              file=filename,
                              color=color,
                              raster=raster,
                              volume=volume,
                              rules=rules,
                              overwrite=grass.overwrite())
        except CalledModuleError:
            grass.fatal(_("Error in r.colors call"))
Example #36
File: utils.py Project: neteler/grass
def checkSeriesCompatibility(mapSeriesList=None, timeseriesList=None):
    """Checks whether time series (map series and stds) are compatible,
    which means they have an equal number of maps and times (in case of stds).
    This is needed within layer list, not within the entire animation tool.
    Throws GException if these are incompatible.

    :return: number of maps for animation
    """
    timeseriesInfo = {
        "count": set(),
        "temporalType": set(),
        "mapType": set(),
        "mapTimes": set(),
    }

    if timeseriesList:
        for stds, etype in timeseriesList:
            sp = tgis.open_old_stds(stds, etype)
            mapType = sp.get_map_time()  # interval, ...
            tempType = sp.get_initial_values()[0]  # absolute
            timeseriesInfo["mapType"].add(mapType)
            timeseriesInfo["temporalType"].add(tempType)
            rows = sp.get_registered_maps_as_objects(where=None,
                                                     order="start_time")

            if rows:
                times = []
                timeseriesInfo["count"].add(len(rows))
                for row in rows:
                    if tempType == "absolute":
                        time = row.get_absolute_time()
                    else:
                        time = row.get_relative_time()
                    times.append(time)
                timeseriesInfo["mapTimes"].add(tuple(times))
            else:
                timeseriesInfo["mapTimes"].add(None)
                timeseriesInfo["count"].add(None)

    if len(timeseriesInfo["count"]) > 1:
        raise GException(
            _("The number of maps in space-time datasets "
              "has to be the same."))

    if len(timeseriesInfo["temporalType"]) > 1:
        raise GException(
            _("The temporal type (absolute/relative) of space-time datasets "
              "has to be the same."))

    if len(timeseriesInfo["mapType"]) > 1:
        raise GException(
            _("The map type (point/interval) of space-time datasets "
              "has to be the same."))

    if len(timeseriesInfo["mapTimes"]) > 1:
        raise GException(
            _("The temporal extents of maps in space-time datasets "
              "have to be the same."))

    if mapSeriesList:
        count = set()
        for mapSeries in mapSeriesList:
            count.add(len(mapSeries))
        if len(count) > 1:
            raise GException(
                _("The number of maps to animate has to be "
                  "the same for each map series."))

        if timeseriesList and list(count)[0] != list(
                timeseriesInfo["count"])[0]:
            raise GException(
                _("The number of maps to animate has to be "
                  "the same as the number of maps in temporal dataset."))

    if mapSeriesList:
        return list(count)[0]
    if timeseriesList:
        return list(timeseriesInfo["count"])[0]
Example #37
def main():

    # Get the options
    input = options["input"]
    output = options["output"]

    # Make sure the temporal database exists
    tgis.init()

    mapset = grass.gisenv()["MAPSET"]

    sp = tgis.open_old_stds(input, "strds")

    grass.use_temp_region()

    maps = sp.get_registered_maps_as_objects_by_granularity()
    num_maps = len(maps)
    # get datatype of the first map
    if maps:
        maps[0][0].select()
        datatype = maps[0][0].metadata.get_datatype()
    else:
        datatype = None

    # Get the granularity and set bottom, top and top-bottom resolution
    granularity = sp.get_granularity()
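    # For absolute time, get_granularity() returns a string such as "1 day" or
    # "3 months"; for relative time it returns a plain number in the dataset's
    # time unit, which is why the string is only split in the absolute branch.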

    # This is the reference time to scale the z coordinate
    reftime = datetime(1900, 1, 1)

    # We set top and bottom according to the start time in relation
    # to the date 1900-01-01 00:00:00
    # In case of days, hours, minutes and seconds, a double number
    # is used to represent days and fracs of a day

    # Space time voxel cubes with monthly or yearly granularity cannot be
    # mixed with other temporal units

    # Compatible temporal units are: days, hours, minutes and seconds
    # Incompatible are years and months
    start, end = sp.get_temporal_extent_as_tuple()

    if sp.is_time_absolute():
        unit = granularity.split(" ")[1]
        granularity = float(granularity.split(" ")[0])

        print "Gran from stds %0.15f"%(granularity)

        if unit == "years" or unit == "year":
            bottom = float(start.year - 1900)
            top = float(granularity * num_maps)
        elif unit == "months" or unit == "month":
            bottom = float((start.year - 1900) * 12 + start.month)
            top = float(granularity * num_maps)
        else:
            bottom = float(tgis.time_delta_to_relative_time(start - reftime))
            days = 0.0
            hours = 0.0
            minutes = 0.0
            seconds = 0.0
            if unit == "days" or unit == "day":
                days = float(granularity)
            if unit == "hours" or unit == "hour":
                hours = float(granularity)
            if unit == "minutes" or unit == "minute":
                minutes = float(granularity)
            if unit == "seconds" or unit == "second":
                seconds = float(granularity)

            granularity = float(days + hours / 24.0 + minutes / \
                1440.0 + seconds / 86400.0)
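            # Worked example: a granularity of "6 hours" yields
            # 0.0 + 6 / 24.0 + 0.0 + 0.0 = 0.25, i.e. each map occupies a
            # quarter of a day along the z axis.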
    else:
        unit = sp.get_relative_time_unit()
        bottom = start

    top = float(bottom + granularity * float(num_maps))
    try:
        grass.run_command("g.region", t=top, b=bottom, tbres=granularity)
    except CalledModuleError:
        grass.fatal(_("Unable to set 3D region"))

    # Create a NULL map to fill the gaps
    null_map = "temporary_null_map_%i" % os.getpid()
    if datatype == 'DCELL':
        grass.mapcalc("%s = double(null())" % (null_map))
    elif datatype == 'FCELL':
        grass.mapcalc("%s = float(null())" % (null_map))
    else:
        grass.mapcalc("%s = null()" % (null_map))

    if maps:
        count = 0
        map_names = ""
        for map in maps:
            # Use the first map
            id = map[0].get_id()
            # None ids will be replaced by NULL maps
            if id is None:
                id = null_map

            if count == 0:
                map_names = id
            else:
                map_names += ",%s" % id

            count += 1

        try:
            grass.run_command("r.to.rast3", input=map_names,
                              output=output, overwrite=grass.overwrite())
        except CalledModuleError:
            grass.fatal(_("Unable to create 3D raster map <%s>" % output))

    grass.run_command("g.remove", flags='f', type='raster', name=null_map)

    title = _("Space time voxel cube")
    descr = _("This space time voxel cube was created with t.rast.to.rast3")

    # Set the unit
    try:
        grass.run_command("r3.support", map=output, vunit=unit,
                          title=title, description=descr,
                          overwrite=grass.overwrite())
    except CalledModuleError:
        grass.warning(_("%s failed to set units.") % 'r3.support')

    # Register the space time voxel cube in the temporal GIS
    if output.find("@") >= 0:
        id = output
    else:
        id = output + "@" + mapset

    start, end = sp.get_temporal_extent_as_tuple()
    r3ds = tgis.Raster3DDataset(id)

    if r3ds.is_in_db():
        r3ds.select()
        r3ds.delete()
        r3ds = tgis.Raster3DDataset(id)

    r3ds.load()

    if sp.is_time_absolute():
        r3ds.set_absolute_time(start, end)
    else:
        r3ds.set_relative_time(start, end, sp.get_relative_time_unit())

    r3ds.insert()
Example #38
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    quantile = options["quantile"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file
        filename = grass.tempfile(True)
        file = open(filename, 'w')

        for row in rows:
            string = "%s\n" % (row["id"])
            file.write(string)

        file.close()

        flag = ""
        if len(rows) > 1000:
            grass.warning(
                _("Processing over 1000 maps: activating -z flag of r.series which slows down processing"
                  ))
            flag += "z"
        if nulls:
            flag += "n"

        try:
            grass.run_command("r.series",
                              flags=flag,
                              file=filename,
                              output=output,
                              overwrite=grass.overwrite(),
                              method=method,
                              quantile=quantile)
        except CalledModuleError:
            grass.fatal(
                _("%s failed. Check above error messages.") % 'r.series')

        if not add_time:

            # Create the time range for the output map
            if output.find("@") >= 0:
                id = output
            else:
                mapset = grass.gisenv()["MAPSET"]
                id = output + "@" + mapset

            map = sp.get_new_map_instance(id)
            map.load()

            # We need to set the temporal extent from the subset of selected maps
            maps = sp.get_registered_maps_as_objects(where=where,
                                                     order=order,
                                                     dbif=None)
            first_map = maps[0]
            last_map = maps[-1]
            start_a, end_a = first_map.get_temporal_extent_as_tuple()
            start_b, end_b = last_map.get_temporal_extent_as_tuple()

            if end_b is None:
                end_b = start_b
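            # Maps with an open interval (end_b is None) contribute only their
            # start time, so the output extent then ends at start_b.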

            if first_map.is_time_absolute():
                extent = tgis.AbsoluteTemporalExtent(start_time=start_a,
                                                     end_time=end_b)
            else:
                extent = tgis.RelativeTemporalExtent(
                    start_time=start_a,
                    end_time=end_b,
                    unit=first_map.get_relative_time_unit())

            map.set_temporal_extent(extent=extent)

            # Register the map in the temporal database
            if map.is_in_db():
                map.update_all()
            else:
                map.insert()
Example #39
def main():
    strds = options["strds"]
    out_name = options["output"]
    if options["weight"] == '':
        method = None
    else:
        method = options["weight"]
    where = options["where"]
    sep = separator(options["separator"])
    if flags['p'] and not options["splittingday"]:
        gscript.fatal(_("The 'p' flag requires the 'splittingday' option to be set"))
    elif flags['p'] and options["splittingday"] and out_name == '-':
        gscript.fatal(_("The 'output' option is required with the 'p' flag"))

    if flags['k'] and flags['p']:
        gscript.fatal(_("It is not possible to use the 'k' and 'p' flags together"))
    elif flags['k'] and not method:
        rkappa = True
    elif flags['k'] and method:
        gscript.message(_("If the method is different from 'no', it is not possible"
                          " to use r.kappa"))
        rkappa = _load_skll()
    else:
        rkappa = _load_skll()

    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where, "start_time", None)
    if maps is None:
        gscript.fatal(_("Space time raster dataset {st} seems to be "
                        "empty".format(st=strds)))
        return 1

    if flags['p']:
        before, after = _split_maps(maps, options["splittingday"])
        _kappa_pixel(before, after, out_name, method, gscript.overwrite())
        return

    mapnames = [mapp.get_name() for mapp in maps]
    if not rkappa:
        if out_name != '-':
            fi = open(out_name, 'w')
        else:
            fi = sys.stdout
    for i1 in range(len(mapnames)):
        for i2 in range(i1 + 1, len(mapnames)):
            map1 = mapnames[i1]
            map2 = mapnames[i2]
            if map1 != map2:
                if not rkappa:
                    fi.write("{}-{}{}{}\n".format(map1, map2, sep,
                                                  _kappa_skll(map1, map2,
                                                              flags['l'],
                                                              method)))
                else:
                    if out_name != '-':
                        fi = open("{}_{}_{}".format(out_name, map1, map2), 'w')
                    else:
                        fi = sys.stdout
                    fi.write("{}".format(_kappa_grass(map1, map2)))
                    if out_name != '-':
                        fi.close()
    if not rkappa:
        fi.close()

    gscript.message(_("All data have analyzed"))
Example #40
def main(options, flags):

    # Get the options
    csv_file = options["csv"]
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    null_value = options["null_value"]
    separator = options["separator"]

    write_header = flags["n"]

    #output_cat_label = flags["f"]
    #output_color = flags["r"]
    #output_cat = flags["i"]

    overwrite = gscript.overwrite()

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)

    # Setup separator
    if separator == "pipe":
        separator = "|"
    if separator == "comma":
        separator = ","
    if separator == "space":
        separator = " "
    if separator == "tab":
        separator = "\t"
    if separator == "newline":
        separator = "\n"


    # Build a reusable r.what command object with pygrass; it is configured
    # once here and executed for every CSV record below, with its output read
    # from r_what.outputs.stdout.
    from subprocess import PIPE
    from grass.pygrass.modules import Module

    r_what = Module("r.what", map="dummy", output="-",
                    separator=separator, null_value=null_value,
                    quiet=True, run_=False, stdout_=PIPE)

    reader = csv.reader(open(csv_file, "r"), delimiter=separator)

    for line in reader:
        id_, x, y, timestamp = line

        start = tgis.string_to_datetime(timestamp)
        where = "start_time <= \'" + str(start) + "\' AND end_time > \'" + str(start) + "\'"
        rows = sp.get_registered_maps(columns="id", where=where,
                                      dbif=dbif)
        for entry in rows:
            r_what.inputs.map = entry[0]
            r_what.inputs.coordinates = [x,y]
            r_what.run()
            out = "%s%s%s" % (id_, separator, r_what.outputs.stdout)

            sys.stdout.write(out)

    dbif.close()
Example #41
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    sampler = options["sample"]
    where = options["where"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]
    type = options["type"]

    topo_list = sampling.split(",")
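    # topo_list holds the temporal relations (e.g. "equal", "during",
    # "contains") used to decide which input maps fall into each granule of
    # the sampling dataset.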

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds", dbif)
    sampler_sp = tgis.open_old_stds(sampler, type, dbif)

    if sampler_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        gcore.fatal(
            _("Input and aggregation dataset must have "
              "the same temporal type"))

    # Check if intervals are present
    if sampler_sp.temporal_extent.get_map_time() != "interval":
        dbif.close()
        gcore.fatal(
            _("All registered maps of the aggregation dataset "
              "must have time intervals"))

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    map_list = sp.get_registered_maps_as_objects(where=where,
                                                 order="start_time",
                                                 dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)

    granularity_list = sampler_sp.get_registered_maps_as_objects(
        where=where, order="start_time", dbif=dbif)

    if not granularity_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % sampler)

    gran = sampler_sp.get_granularity()

    output_list = tgis.aggregate_by_topology(granularity_list=granularity_list,
                                             granularity=gran,
                                             map_list=map_list,
                                             topo_list=topo_list,
                                             basename=base,
                                             time_suffix=time_suffix,
                                             offset=offset,
                                             method=method,
                                             nprocs=nprocs,
                                             spatial=None,
                                             overwrite=gcore.overwrite())

    if output_list:
        temporal_type, semantic_type, title, description = \
            sp.get_initial_values()
        output_strds = tgis.open_new_stds(output, "strds", temporal_type,
                                          title, description, semantic_type,
                                          dbif, gcore.overwrite())
        tgis.register_map_object_list("rast", output_list,
                                      output_strds, register_null,
                                      sp.get_relative_time_unit(), dbif)

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
Example #42
def main():
    #lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    vector_output = options["vector_output"]
    strds = options["strds"]
    where = options["where"]
    columns = options["columns"]

    if where == "" or where == " " or where == "\n":
        where = None

    overwrite = grass.overwrite()

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")
    column_names = columns.split(",")

    if len(strds_names) != len(column_names):
        grass.fatal(_("The number of columns must be equal to the number of space time raster datasets"))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = grass.gisenv()["MAPSET"]

    out_sp = tgis.check_new_stds(output, "stvds", dbif, overwrite)

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)

    # Single space time raster dataset
    if len(strds_names) == 1:
        rows = first_strds.get_registered_maps(
            columns="name,mapset,start_time,end_time",
            order="start_time", dbif=dbif)

        if not rows:
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> is empty") %
                        out_sp.get_id())

        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [row["name"] + "@" + row["mapset"],]

            s = Sample(start, end, raster_maps)
            samples.append(s)
    else:
        # Multiple space time raster datasets
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(_("Temporal type of space time raster datasets must be equal\n"
                              "<%(a)s> of type %(type_a)s do not match <%(b)s> of type %(type_b)s"%
                              {"a":first_strds.get_id(),
                               "type_a":first_strds.get_temporal_type(),
                               "b":dataset.get_id(),
                               "type_b":dataset.get_temporal_type()}))

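        # Sample all space time raster datasets against the first one using
        # the "equal" temporal relation; the loop below builds one sample
        # (granule plus map names) per matching granule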
        mapmatrizes = tgis.sample_stds_by_stds_topology("strds", "strds", strds_names,
                                                      strds_names[0], False, None,
                                                      "equal", False, False)

        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:

                entry = mapmatrix[i]

                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)

            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]

                start, end = map.get_temporal_extent_as_tuple()
                s = Sample(start, end, mapname_list)
                samples.append(s)

    num_samples = len(samples)

    # Get the layer and database connections of the input vector
    vector_db = grass.vector.vector_db(input)

    # We copy the vector table and create the new layers
    if vector_db:
        # Use the first layer to copy the categories from
        layers = "1,"
    else:
        layers = ""
    first = True
    for layer in range(num_samples):
        layer += 1
        # Skip existing layer
        if vector_db and layer in vector_db and \
           vector_db[layer]["layer"] == layer:
            continue
        if first:
            layers += "%i" % (layer)
            first = False
        else:
            layers += ",%i" % (layer)

    vectmap = vector_output

    # We create a new vector map using the categories of the original map
    try:
        grass.run_command("v.category", input=input, layer=layers,
                          output=vectmap, option="transfer",
                          overwrite=overwrite)
    except CalledModuleError:
        grass.fatal(_("Unable to create new layers for vector map <%s>")
                    % (vectmap))

    title = _("Observaion of space time raster dataset(s) <%s>") % (strds)
    description= _("Observation of space time raster dataset(s) <%s>"
                   " with vector map <%s>") % (strds, input)

    # Create the output space time vector dataset
    out_sp = tgis.open_new_stds(output, "stvds",
                                first_strds.get_temporal_type(),
                                title, description,
                                first_strds.get_semantic_type(),
                                dbif, overwrite)

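    # The dummy map instance is only used to build the layer-specific
    # map ids of the output vector maps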
    dummy = out_sp.get_new_map_instance(None)

    # Sample the space time raster dataset with the vector
    # map at specific layer with v.what.rast
    count = 1
    for sample in samples:
        raster_names = sample.raster_names

        if len(raster_names) != len(column_names):
            grass.fatal(_("The number of raster maps in a granule must "
                          "be equal to the number of column names"))

        # Create the columns creation string
        columns_string = ""
        for name, column in zip(raster_names, column_names):
            # The column is by default double precision
            coltype = "DOUBLE PRECISION"
            # Get raster map type
            raster_map = tgis.RasterDataset(name)
            raster_map.load()
            if raster_map.metadata.get_datatype() == "CELL":
                coltype = "INT"

            tmp_string = "%s %s," %(column, coltype)
            columns_string += tmp_string

        # Remove last comma
        columns_string = columns_string[0:len(columns_string) - 1]

        # Try to add a column
        if vector_db and count in vector_db and vector_db[count]["table"]:
            try:
                grass.run_command("v.db.addcolumn", map=vectmap,
                                  layer=count, column=columns_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to add column %s to vector map <%s> "
                              "with layer %i") % (columns_string, vectmap, count))
        else:
            # Try to add a new table
            grass.message("Add table to layer %i" % (count))
            try:
                grass.run_command("v.db.addtable", map=vectmap, layer=count,
                                  columns=columns_string, overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to add table to vector map "
                              "<%s> with layer %i") % (vectmap, count))

        # Call v.what.rast for each raster map
        for name, column in zip(raster_names, column_names):
            try:
                grass.run_command("v.what.rast", map=vectmap,
                                  layer=count, raster=name,
                                  column=column, where=where)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to run v.what.rast for vector map <%s> "
                            "with layer %i and raster map <%s>") %
                            (vectmap, count, str(raster_names)))

        vect = out_sp.get_new_map_instance(dummy.build_id(vectmap,
                                                          mapset, str(count)))
        vect.load()

        start = sample.start
        end = sample.end

        if out_sp.is_time_absolute():
            vect.set_absolute_time(start, end)
        else:
            vect.set_relative_time(
                start, end, first_strds.get_relative_time_unit())

        if vect.is_in_db(dbif):
            vect.update_all(dbif)
        else:
            vect.insert(dbif)

        out_sp.register_map(vect, dbif)
        count += 1

    out_sp.update_from_registered_maps(dbif)
    dbif.close()
Example #43
def main():
    # lazy imports
    import grass.temporal as tgis
    from grass.pygrass.utils import copy as gcopy
    from grass.pygrass.messages import Messenger
    from grass.pygrass.vector import Vector

    # Get the options
    input = options["input"]
    output = options["output"]
    strds = options["strds"]
    where = options["where"]
    tempwhere = options["t_where"]

    if output and flags['u']:
        grass.fatal(_("Cannot combine 'output' option and 'u' flag"))
    elif not output and not flags['u']:
        grass.fatal(_("'output' option or 'u' flag must be given"))
    elif not output and flags['u']:
        grass.warning(
            _("Attribute table of vector {name} will be updated...").format(
                name=input))

    if where == "" or where == " " or where == "\n":
        where = None

    overwrite = grass.overwrite()

    quiet = True

    if grass.verbosity() > 2:
        quiet = False

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)
    # Single space time raster dataset
    if len(strds_names) == 1:
        granu = first_strds.get_granularity()
        rows = first_strds.get_registered_maps(
            "name,mapset,start_time,end_time", tempwhere, "start_time", dbif)

        if not rows:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is empty") %
                first_strds.get_id())
        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [
                row["name"] + "@" + row["mapset"],
            ]

            s = Sample(start, end, raster_maps, first_strds.get_name(), granu)
            samples.append(s)
    else:
        # Multiple space time raster datasets
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(
                    _(
                        "Temporal type of space time raster "
                        "datasets must be equal\n<%(a)s> of type "
                        "%(type_a)s do not match <%(b)s> of type "
                        "%(type_b)s" % {
                            "a": first_strds.get_id(),
                            "type_a": first_strds.get_temporal_type(),
                            "b": dataset.get_id(),
                            "type_b": dataset.get_temporal_type()
                        }))

        mapmatrizes = tgis.sample_stds_by_stds_topology(
            "strds", "strds", strds_names, strds_names[0], False, None,
            "equal", False, False)
        #TODO check granularity for multiple STRDS
        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:

                entry = mapmatrix[i]

                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)

            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]

                start, end = map.get_temporal_extent_as_tuple()
                s = Sample(start, end, mapname_list, name)
                samples.append(s)

    # Get the layer and database connections of the input vector
    if output:
        gcopy(input, output, 'vector')
    else:
        output = input

    msgr = Messenger()
    perc_curr = 0
    perc_tot = len(samples)
    pymap = Vector(output)
    try:
        pymap.open('r')
    except:
        dbif.close()
        grass.fatal(_("Unable to create vector map <%s>" % output))

    if len(pymap.dblinks) == 0:
        try:
            pymap.close()
            grass.run_command("v.db.addtable", map=output)
        except CalledModuleError:
            dbif.close()
            grass.fatal(
                _("Unable to add table to vector map <%s>") % output)
    if pymap.is_open():
        pymap.close()

    for sample in samples:
        raster_names = sample.raster_names
        # Call v.what.rast for each raster map

        for name in raster_names:
            coltype = "DOUBLE PRECISION"
            # Get raster map type
            raster_map = tgis.RasterDataset(name)
            raster_map.load()
            if raster_map.metadata.get_datatype() == "CELL":
                coltype = "INT"
            day = sample.printDay()
            column_name = "%s_%s" % (sample.strds_name, day)
            column_string = "%s %s" % (column_name, coltype)
            column_string = column_string.replace('.', '_')
            try:
                grass.run_command("v.db.addcolumn",
                                  map=output,
                                  column=column_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to add column %s to vector map "
                      "<%s> ") % (column_string, output))
            try:
                grass.run_command("v.what.rast",
                                  map=output,
                                  raster=name,
                                  column=column_name,
                                  where=where,
                                  quiet=quiet)
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to run v.what.rast for vector map"
                      " <%s> and raster map <%s>") %
                    (output, str(raster_names)))

        msgr.percent(perc_curr, perc_tot, 1)
        perc_curr += 1

    dbif.close()
Example #44
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    strds = options["strds"]
    where = options["where"]
    column = options["column"]
    method = options["method"]
    tempwhere = options["t_where"]
    sampling = options["sampling"]

    if where == "" or where == " " or where == "\n":
        where = None

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "stvds", dbif)
    strds_sp = tgis.open_old_stds(strds, "strds", dbif)

    if strds_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        grass.fatal(
            _("Input and aggregation dataset must "
              "have the same temporal type"))

    # Check if intervals are present in the sample dataset
    if sp.get_temporal_type() == "absolute":
        map_time = sp.absolute_time.get_map_time()
    else:
        map_time = sp.relative_time.get_map_time()

    if map_time != "interval":
        dbif.close()
        grass.fatal(
            _("All registered maps of the space time vector "
              "dataset must have time intervals"))

    rows = sp.get_registered_maps("name,layer,mapset,start_time,end_time",
                                  tempwhere, "start_time", dbif)

    if not rows:
        dbif.close()
        grass.fatal(_("Space time vector dataset <%s> is empty") % sp.get_id())

    # Sample the raster dataset with the vector dataset and run v.what.rast
    for row in rows:
        start = row["start_time"]
        end = row["end_time"]
        vectmap = row["name"] + "@" + row["mapset"]
        layer = row["layer"]

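        # Collect the names of all raster maps of the STRDS that match the
        # time interval of the vector map according to the sampling method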
        raster_maps = tgis.collect_map_names(strds_sp, dbif, start, end,
                                             sampling)

        aggregated_map_name = None

        if raster_maps:
            # Aggregation
            if method != "disabled" and len(raster_maps) > 1:
                # Generate the temporary map name
                aggregated_map_name = "aggregated_map_name_" + \
                    str(os.getpid())
                new_map = tgis.aggregate_raster_maps(raster_maps,
                                                     aggregated_map_name,
                                                     start, end, 0, method,
                                                     False, dbif)
                aggregated_map_name = aggregated_map_name + "_0"
                if new_map is None:
                    continue
                # We overwrite the raster_maps list
                raster_maps = (new_map.get_id(), )

            for rastermap in raster_maps:

                if column:
                    col_name = column
                else:
                    # Create a new column with the SQL compliant
                    # name of the sampled raster map
                    col_name = rastermap.split("@")[0].replace(".", "_")

                coltype = "DOUBLE PRECISION"
                # Get raster type
                rasterinfo = raster.raster_info(rastermap)
                if rasterinfo["datatype"] == "CELL":
                    coltype = "INT"

                try:
                    if layer:
                        grass.run_command("v.db.addcolumn",
                                          map=vectmap,
                                          layer=layer,
                                          column="%s %s" % (col_name, coltype),
                                          overwrite=grass.overwrite())
                    else:
                        grass.run_command("v.db.addcolumn",
                                          map=vectmap,
                                          column="%s %s" % (col_name, coltype),
                                          overwrite=grass.overwrite())
                except CalledModuleError:
                    dbif.close()
                    grass.fatal(
                        _("Unable to add column %s to vector map <%s>") %
                        (col_name, vectmap))

                # Call v.what.rast
                try:
                    if layer:
                        grass.run_command("v.what.rast",
                                          map=vectmap,
                                          layer=layer,
                                          raster=rastermap,
                                          column=col_name,
                                          where=where)
                    else:
                        grass.run_command("v.what.rast",
                                          map=vectmap,
                                          raster=rastermap,
                                          column=col_name,
                                          where=where)
                except CalledModuleError:
                    dbif.close()
                    grass.fatal(
                        _("Unable to run v.what.rast for vector map "
                          "<%s> and raster map <%s>") % (vectmap, rastermap))

                if aggregated_map_name:
                    try:
                        grass.run_command("g.remove",
                                          flags='f',
                                          type='raster',
                                          name=aggregated_map_name)
                    except CalledModuleError:
                        dbif.close()
                        grass.fatal(
                            _("Unable to remove raster map <%s>") %
                            (aggregated_map_name))

                # Use only the first map if a column name was provided
                if column:
                    break

    dbif.close()
Example #45
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    gran = options["granularity"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    file_limit = options["file_limit"]
    time_suffix = options["suffix"]

    topo_list = sampling.split(",")

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds", dbif)

    map_list = sp.get_registered_maps_as_objects(where=where,
                                                 order="start_time",
                                                 dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    start_time = map_list[0].temporal_extent.get_start_time()

    if sp.is_time_absolute():
        start_time = tgis.adjust_datetime_to_granularity(start_time, gran)

    # We use the end time first
    end_time = map_list[-1].temporal_extent.get_end_time()
    has_end_time = True

    # In case no end time is available, then we use the start time of the last map layer
    if end_time is None:
        end_time = map_list[-1].temporal_extent.get_start_time()
        has_end_time = False

    granularity_list = []

    # Build the granularity list
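    # Each granule is an empty RasterDataset that only carries the temporal
    # extent of one aggregation window; the aggregation itself happens below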
    while True:
        if has_end_time is True:
            if start_time >= end_time:
                break
        else:
            if start_time > end_time:
                break

        granule = tgis.RasterDataset(None)
        start = start_time
        if sp.is_time_absolute():
            end = tgis.increment_datetime_by_string(start_time, gran)
            granule.set_absolute_time(start, end)
        else:
            end = start_time + int(gran)
            granule.set_relative_time(start, end, sp.get_relative_time_unit())
        start_time = end

        granularity_list.append(granule)

    output_list = tgis.aggregate_by_topology(granularity_list=granularity_list,
                                             granularity=gran,
                                             map_list=map_list,
                                             topo_list=topo_list,
                                             basename=base,
                                             time_suffix=time_suffix,
                                             offset=offset,
                                             method=method,
                                             nprocs=nprocs,
                                             spatial=None,
                                             overwrite=gcore.overwrite(),
                                             file_limit=file_limit)

    if output_list:
        temporal_type, semantic_type, title, description = sp.get_initial_values()
        output_strds = tgis.open_new_stds(output, "strds", temporal_type,
                                          title, description, semantic_type,
                                          dbif, gcore.overwrite())
        # Note: the inverted flag is passed to register_map_object_list()
        register_null = not register_null

        tgis.register_map_object_list("rast", output_list,
                                      output_strds, register_null,
                                      sp.get_relative_time_unit(), dbif)

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
Example #46
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    base = options["basename"]
    where = options["where"]
    nprocs = options["nprocs"]
    tsuffix = options["suffix"]

    mapset = grass.encode(grass.gisenv()["MAPSET"])

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds")

    maps = sp.get_registered_maps_as_objects_with_gaps(where, dbif)

    num = len(maps)

    # Configure the r.series.interp module
    gapfill_module = pymod.Module(
        "r.series.interp",
        overwrite=grass.overwrite(),
        quiet=True,
        run_=False,
        finish_=False,
    )

    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    gap_list = []
    overwrite_flags = {}

    # Identify all gaps and create new names
    count = 0
    for _map in maps:
        if _map.get_id() is None:
            count += 1
            if sp.get_temporal_type() == 'absolute' and tsuffix in [
                    'gran', 'time'
            ]:
                _id = "{ba}@{ma}".format(ba=base, ma=mapset)
            else:
                map_name = tgis.create_numeric_suffix(base, num + count,
                                                      tsuffix)
                _id = "{name}@{ma}".format(name=map_name, ma=mapset)
            _map.set_id(_id)

            gap_list.append(_map)

    if len(gap_list) == 0:
        grass.message(_("No gaps found"))
        return

    # Build the temporal topology
    tb = tgis.SpatioTemporalTopologyBuilder()
    tb.build(maps)

    # Do some checks before computation
    for _map in gap_list:
        if not _map.get_precedes() or not _map.get_follows():
            grass.fatal(
                _("Unable to determine successor "
                  "and predecessor of a gap."))

        if len(_map.get_precedes()) > 1:
            grass.warning(
                _("More than one successor of the gap found. "
                  "Using the first found."))

        if len(_map.get_follows()) > 1:
            grass.warning(
                _("More than one predecessor of the gap found. "
                  "Using the first found."))

    # Interpolate the maps using parallel processing
    result_list = []

    for _map in gap_list:
        predecessor = _map.get_follows()[0]
        successor = _map.get_precedes()[0]

        gran = sp.get_granularity()
        tmpval, start = predecessor.get_temporal_extent_as_tuple()
        end, tmpval = successor.get_temporal_extent_as_tuple()

        # Now resample the gap
        map_matrix = tgis.AbstractSpaceTimeDataset.resample_maplist_by_granularity(
            (_map, ), start, end, gran)

        map_names = []
        map_positions = []

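        # Distribute the interpolated maps evenly between the predecessor
        # (position 0) and successor (position 1): with n new maps the
        # sampling positions are 1/(n+1), 2/(n+1), ..., n/(n+1)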
        increment = 1.0 / (len(map_matrix) + 1.0)
        position = increment
        count = 0
        for intp_list in map_matrix:
            new_map = intp_list[0]
            count += 1
            if sp.get_temporal_type() == 'absolute' and tsuffix == 'gran':
                suffix = tgis.create_suffix_from_datetime(
                    new_map.temporal_extent.get_start_time(),
                    sp.get_granularity())
                new_id = "{ba}_{su}@{ma}".format(ba=new_map.get_name(),
                                                 su=suffix,
                                                 ma=mapset)
            elif sp.get_temporal_type() == 'absolute' and tsuffix == 'time':
                suffix = tgis.create_time_suffix(new_map)
                new_id = "{ba}_{su}@{ma}".format(ba=new_map.get_name(),
                                                 su=suffix,
                                                 ma=mapset)
            else:
                map_name = tgis.create_numeric_suffix(new_map.get_name(),
                                                      count, tsuffix)
                new_id = "{name}@{ma}".format(name=map_name, ma=mapset)

            new_map.set_id(new_id)

            overwrite_flags[new_id] = False
            if new_map.map_exists() or new_map.is_in_db(dbif):
                if not grass.overwrite():
                    grass.fatal(
                        _("Map with name <%s> already exists. "
                          "Please use another base name.") % (new_id))
                else:
                    if new_map.is_in_db(dbif):
                        overwrite_flags[new_id] = True

            map_names.append(new_map.get_name())
            map_positions.append(position)
            position += increment

            result_list.append(new_map)

        mod = copy.deepcopy(gapfill_module)
        mod(input=(predecessor.get_map_id(), successor.get_map_id()),
            datapos=(0, 1),
            output=map_names,
            samplingpos=map_positions)
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Insert new interpolated maps in temporal database and dataset
    for _map in result_list:
        id = _map.get_id()
        if overwrite_flags[id]:
            if _map.is_time_absolute():
                start, end = _map.get_absolute_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(id)
                _map.set_absolute_time(start, end)
            else:
                start, end, unit = _map.get_relative_time()
                if _map.is_in_db():
                    _map.delete(dbif)
                _map = sp.get_new_map_instance(id)
                _map.set_relative_time(start, end, unit)
        _map.load()
        _map.insert(dbif)
        sp.register_map(_map, dbif)

    sp.update_from_registered_maps(dbif)
    sp.update_command_string(dbif=dbif)
    dbif.close()
Example #47
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    register_null = flags["n"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the HANTS module
    hants_flags = ""
    if flags["l"]:
        hants_flags = hants_flags + 'l'
    if flags["h"]:
        hants_flags = hants_flags + 'h'
    if flags["i"]:
        hants_flags = hants_flags + 'i'

    kwargs = dict()
    kwargs['nf'] = options['nf']
    if options['fet']:
        kwargs['fet'] = options['fet']
    kwargs['dod'] = options['dod']
    if options['range']:
        kwargs['range'] = options['range']
    kwargs['suffix'] = "_hants"
    if len(hants_flags) > 0:
        kwargs['flags'] = hants_flags

    count = 0
    num_maps = len(maps)
    new_maps = []

    maplistfile = script.tempfile()
    fd = open(maplistfile, 'w')

    # create list of input maps and their time stamps
    for map in maps:
        count += 1
        map_name = "{ba}_hants".format(ba=map.get_id())

        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        fd.write("{0}\n".format(map.get_id()))

    fd.close()

    # run r.hants
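    # r.hants writes one output raster per input map, named with the given
    # suffix, matching the map datasets prepared above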
    grass.run_command('r.hants',
                      file=maplistfile,
                      suffix="_hants",
                      quiet=True,
                      **kwargs)

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
Example #48
def main():
    # lazy imports
    import grass.temporal as tgis

    strdsin = options["strds"]
    rasterin = options["raster"]
    strdsout = options["output"]
    bandname = options["bandname"]

    type_ = "strds"

    # make sure the temporal database exists
    tgis.init()

    dbif, connection_state_changed = tgis.init_dbif(None)

    rows = tgis.get_tgis_metadata(dbif)

    if strdsin.find("@") >= 0:
        strdsid_ = strdsin
    else:
        strdsid_ = strdsin + "@" + grass.gisenv()["MAPSET"]

    if rasterin.find("@") >= 0:
        rasterid_ = rasterin
    else:
        rasterid_ = rasterin + "@" + grass.gisenv()["MAPSET"]

    datasetin = tgis.dataset_factory(type_, strdsid_)

    if not datasetin.is_in_db(dbif):
        grass.fatal(
            _("Dataset <{n}> of type <{t}> not found in temporal database").
            format(n=strdsid_, t=type_))

    datasetin.select(dbif)

    start_time = datasetin.temporal_extent.get_start_time()
    end_time = datasetin.temporal_extent.get_end_time()

    # create a new strds using the old strds as template

    # specs of input strds
    sp = tgis.open_old_stds(strdsid_, "strds", dbif)
    ttype, stype, title, descr = sp.get_initial_values()
    dbif.close()

    # t.create, use specs of input strds
    grass.run_command('t.create',
                      type='strds',
                      output=strdsout,
                      temporaltype=ttype,
                      semantictype=stype,
                      title=title,
                      description=descr)

    # register the raster map in the new strds
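    # One line per map: id|start time|end time[|band name], as expected
    # by the t.register file option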
    rlistfile = grass.tempfile(create=False)
    fd = open(rlistfile, "w")
    if bandname:
        fd.write("%s|%s|%s|%s\n" %
                 (rasterid_, str(start_time), str(end_time), bandname))
    else:
        fd.write("%s|%s|%s\n" % (rasterid_, str(start_time), str(end_time)))
    fd.close()
    # t.register to create new strds
    grass.run_command('t.register', input=strdsout, file=rlistfile)
    grass.try_remove(rlistfile)
Example #49
def main(options, flags):

    # Get the options
    points = options["points"]
    coordinates = options["coordinates"]
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    order = options["order"]
    layout = options["layout"]
    null_value = options["null_value"]
    separator = options["separator"]

    nprocs = int(options["nprocs"])
    write_header = flags["n"]
    use_stdin = flags["i"]

    # output_cat_label = flags["f"]
    # output_color = flags["r"]
    # output_cat = flags["i"]

    overwrite = gscript.overwrite()

    if coordinates and points:
        gscript.fatal(_("Options coordinates and points are mutually exclusive"))

    if not coordinates and not points and not use_stdin:
        gscript.fatal(_("Please specify the coordinates, the points option or use the 's' option to pipe coordinate positions to t.rast.what from stdin, to provide the sampling coordinates"))

    if use_stdin:
        coordinates_stdin = str(sys.__stdin__.read())
        # Check if coordinates are given with site names or IDs
        stdin_length = len(coordinates_stdin.split('\n')[0].split())
        if stdin_length <= 2:
            site_input = False
        elif stdin_length >= 3:
            site_input = True
    else:
        site_input = False

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, order=order,
                                             dbif=dbif)
    dbif.close()

    if not maps:
        gscript.fatal(_("Space time raster dataset <%s> is empty") % sp.get_id())

    # Setup separator
    if separator == "pipe":
        separator = "|"
    if separator == "comma":
        separator = ","
    if separator == "space":
        separator = " "
    if separator == "tab":
        separator = "\t"
    if separator == "newline":
        separator = "\n"

    # Setup flags are disabled due to test issues
    flags = ""
    # if output_cat_label is True:
    #    flags += "f"
    # if output_color is True:
    #    flags += "r"
    # if output_cat is True:
    #    flags += "i"

    # Configure the r.what module
    if points:
        r_what = pymod.Module("r.what", map="dummy",
                              output="dummy", run_=False,
                              separator=separator, points=points,
                              overwrite=overwrite, flags=flags,
                              quiet=True)
    elif coordinates:
        # Create a list of values
        coord_list = coordinates.split(",")
        r_what = pymod.Module("r.what", map="dummy",
                              output="dummy", run_=False,
                              separator=separator,
                              coordinates=coord_list,
                              overwrite=overwrite, flags=flags,
                              quiet=True)
    elif use_stdin:
        r_what = pymod.Module("r.what", map="dummy",
                              output="dummy", run_=False,
                              separator=separator,
                              stdin_=coordinates_stdin,
                              overwrite=overwrite, flags=flags,
                              quiet=True)
    else:
        grass.error(_("Please specify points or coordinates"))

    if len(maps) < nprocs:
        nprocs = len(maps)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))
    num_maps = len(maps)

    # 400 maps is the absolute maximum in r.what
    # We need to determine the number of maps that can be processed
    # in parallel

    # First estimate the number of maps per process. We use 400 maps
    # simultaneously as maximum for a single process

    num_loops = int(num_maps / (400 * nprocs))
    remaining_maps = num_maps % (400 * nprocs)

    if num_loops == 0:
        num_loops = 1
        remaining_maps = 0

    # Compute the number of maps for each process
    maps_per_loop = int((num_maps - remaining_maps) / num_loops)
    maps_per_process = int(maps_per_loop / nprocs)
    remaining_maps_per_loop = maps_per_loop % nprocs
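    # Example: 2500 maps with 4 processes yields num_loops = 1,
    # remaining_maps = 900, maps_per_loop = 1600, maps_per_process = 400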

    # We put the output files in an ordered list
    output_files = []
    output_time_list = []

    count = 0
    for loop in range(num_loops):
        file_name = gscript.tempfile() + "_%i" % (loop)
        count = process_loop(nprocs, maps, file_name, count, maps_per_process,
                             remaining_maps_per_loop, output_files,
                             output_time_list, r_what, process_queue)

    process_queue.wait()

    gscript.verbose("Number of raster map layers remaining for sampling %i" % (remaining_maps))
    if remaining_maps > 0:
        # Use a single process if fewer than 100 maps remain
        if remaining_maps <= 100:
            # Collect the remaining maps and sample them in one r.what call
            map_names = [map.get_id() for map in maps[count:]]
            file_name = gscript.tempfile() + "_remain"
            output_files.append(file_name)
            output_time_list.append(maps[count:])
            mod = copy.deepcopy(r_what)
            mod(map=map_names, output=file_name)
            process_queue.put(mod)
        else:
            maps_per_process = int(remaining_maps / nprocs)
            remaining_maps_per_loop = remaining_maps % nprocs

            file_name = "out_remain"
            process_loop(nprocs, maps, file_name, count, maps_per_process,
                         remaining_maps_per_loop, output_files,
                         output_time_list, r_what, process_queue)

    # Wait for unfinished processes
    process_queue.wait()

    # Merge the output files in the correct order
    if layout == "row":
        one_point_per_row_output(separator, output_files, output_time_list,
                                 output, write_header, site_input)
    elif layout == "col":
        one_point_per_col_output(separator, output_files, output_time_list,
                                 output, write_header, site_input)
    else:
        one_point_per_timerow_output(separator, output_files, output_time_list,
                                     output, write_header, site_input)
Example #50
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file
        filename = grass.tempfile(True)
        file = open(filename, 'w')

        for row in rows:
            string = "%s\n" % (row["id"])
            file.write(string)

        file.close()

        flag = ""
        if len(rows) > 1000:
            grass.warning(
                _("Processing over 1000 maps: activating -z flag of r.series which slows down processing"
                  ))
            flag += "z"
        if nulls:
            flag += "n"

        try:
            grass.run_command("r.series",
                              flags=flag,
                              file=filename,
                              output=output,
                              overwrite=grass.overwrite(),
                              method=method)
        except CalledModuleError:
            grass.fatal(
                _("%s failed. Check above error messages.") % 'r.series')

        if not add_time:
            # Create the time range for the output map
            if output.find("@") >= 0:
                id = output
            else:
                mapset = grass.gisenv()["MAPSET"]
                id = output + "@" + mapset

            map = sp.get_new_map_instance(id)
            map.load()
            map.set_temporal_extent(sp.get_temporal_extent())

            # Register the map in the temporal database
            if map.is_in_db():
                map.update_all()
            else:
                map.insert()
Example #51
def main(options, flags):
    import grass.pygrass.modules as pymod
    import grass.temporal as tgis
    from grass.pygrass.vector import VectorTopo

    invect = options["input"]
    if invect.find('@') != -1:
        invect = invect.split('@')[0]
    incol = options["date_column"]
    indate = options["date"]
    strds = options["strds"]
    if strds.find('@') != -1:
        strds_name = strds.split('@')[0]
    else:
        strds_name = strds
    output = options["output"]
    cols = options["columns"].split(',')
    mets = options["method"].split(',')
    gran = options["granularity"]
    dateformat = options["date_format"]
    separator = gscript.separator(options["separator"])

    stdout = False
    if output != '-' and flags['u']:
        gscript.fatal(_("Cannot combine 'output' option and 'u' flag"))
    elif output != '-' and flags['c']:
        gscript.fatal(_("Cannot combine 'output' option and 'c' flag"))
    elif output == '-' and (flags['u'] or flags['c']):
        output = invect
        gscript.warning(_("Attribute table of vector {name} will be updated"
                          "...").format(name=invect))
    else:
        stdout = True
    if flags['c']:
        cols = []
        for m in mets:
            colname = "{st}_{me}".format(st=strds_name, me=m)
            cols.append(colname)
            try:
                pymod.Module("v.db.addcolumn", map=invect, columns="{col} "
                             "double precision".format(col=colname))
            except CalledModuleError:
                gscript.fatal(_("Not possible to create column "
                                "{col}".format(col=colname)))

    if output != '-' and len(cols) != len(mets):
        gscript.fatal(_("'columns' and 'method' options must have the same "
                        "number of elements"))
    tgis.init()
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()
    sp = tgis.open_old_stds(strds, "strds", dbif)

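    # Convert the requested granularity into a timedelta (absolute time)
    # or an integer offset (relative time) that defines the sampling
    # window around each date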
    if sp.get_temporal_type() == 'absolute':
        delta = int(tgis.gran_to_gran(gran, sp.get_granularity(), True))
        if tgis.gran_singular_unit(gran) in ['year', 'month']:
            delta = int(tgis.gran_to_gran(gran, '1 day', True))
            td = timedelta(delta)
        elif tgis.gran_singular_unit(gran) == 'day':
            delta = tgis.gran_to_gran(gran, sp.get_granularity(), True)
            td = timedelta(delta)
        elif tgis.gran_singular_unit(gran) == 'hour':
            td = timedelta(hours=delta)
        elif tgis.gran_singular_unit(gran) == 'minute':
            td = timedelta(minutes=delta)
        elif tgis.gran_singular_unit(gran) == 'second':
            td = timedelta(seconds=delta)
    else:
        if sp.get_granularity() >= int(gran):
            gscript.fatal(_("Input granularity is smaller or equal to the {iv}"
                            " STRDS granularity".format(iv=strds)))
        td = int(gran)
    if incol and indate:
        gscript.fatal(_("Cannot combine 'date_column' and 'date' options"))
    elif not incol and not indate:
        gscript.fatal(_("You have to fill 'date_column' or 'date' option"))
    elif incol:
        try:
            dates = pymod.Module("db.select", flags='c', stdout_=PI,
                                 stderr_=PI, sql="SELECT DISTINCT {dc} from "
                                   "{vmap} order by {dc}".format(vmap=invect,
                                                                 dc=incol))
            mydates = dates.outputs["stdout"].value.splitlines()
        except CalledModuleError:
            gscript.fatal(_("db.select return an error"))
    elif indate:
        mydates = [indate]
        pymap = VectorTopo(invect)
        pymap.open('r')
        if len(pymap.dblinks) == 0:
            try:
                pymap.close()
                pymod.Module("v.db.addtable", map=invect)
            except CalledModuleError:
                dbif.close()
                gscript.fatal(_("Unable to add table <%s> to vector map "
                                "<%s>" % invect))
        if pymap.is_open():
            pymap.close()
        qfeat = pymod.Module("v.category", stdout_=PI, stderr_=PI,
                             input=invect, option='print')
        myfeats = qfeat.outputs["stdout"].value.splitlines()

    if stdout:
        outtxt = ''
    for data in mydates:
        if sp.get_temporal_type() == 'absolute':
            fdata = datetime.strptime(data, dateformat)
        else:
            fdata = int(data)
        if flags['a']:
            sdata = fdata + td
            mwhere = "start_time >= '{inn}' and end_time < " \
                   "'{out}'".format(inn=fdata, out=sdata)
        else:
            sdata = fdata - td
            mwhere = "start_time >= '{inn}' and end_time < " \
                   "'{out}'".format(inn=sdata, out=fdata)
        lines = None
        try:
            r_what = pymod.Module("t.rast.what", points=invect, strds=strds,
                                  layout='timerow', separator=separator,
                                  flags="v", where=mwhere, quiet=True,
                                  stdout_=PI, stderr_=PI)
            lines = r_what.outputs["stdout"].value.splitlines()
        except CalledModuleError:
            pass
        if incol:
            try:
                qfeat = pymod.Module("db.select", flags='c', stdout_=PI,
                                     stderr_=PI, sql="SELECT DISTINCT cat from"
                                     " {vmap} where {dc}='{da}' order by "
                                     "cat".format(vmap=invect, da=data,
                                                  dc=incol))
                myfeats = qfeat.outputs["stdout"].value.splitlines()
            except CalledModuleError:
                gscript.fatal(_("db.select returned an error for date "
                                "{da}".format(da=data)))
        if not lines and stdout:
            for feat in myfeats:
                outtxt += "{di}{sep}{da}".format(di=feat, da=data,
                                                   sep=separator)
                for n in range(len(mets)):
                    outtxt += "{sep}{val}".format(val='*', sep=separator)
                outtxt += "\n"
        if not lines:
            continue
        x = 0
        for line in lines:
            vals = line.split(separator)
            if vals[0] in myfeats:
                try:
                    nvals = np.array(vals[4:]).astype(float)
                except ValueError:
                    if stdout:
                        outtxt += "{di}{sep}{da}".format(di=vals[0],
                                                         da=data,
                                                         sep=separator)
                        for n in range(len(mets)):
                            outtxt += "{sep}{val}".format(val='*',
                                                          sep=separator)
                        outtxt += "\n"
                    continue
                if stdout:
                    outtxt += "{di}{sep}{da}".format(di=vals[0], da=data,
                                                     sep=separator)
                for n in range(len(mets)):
                    result = return_value(nvals, mets[n])
                    if stdout:
                        outtxt += "{sep}{val}".format(val=result,
                                                      sep=separator)
                    else:
                        try:
                            if incol:
                                pymod.Module("v.db.update", map=output,
                                             column=cols[n], value=str(result),
                                             where="{dc}='{da}' AND cat="
                                             "{ca}".format(da=data, ca=vals[0],
                                                           dc=incol))
                            else:
                                pymod.Module("v.db.update", map=output,
                                             column=cols[n], value=str(result),
                                             where="cat={ca}".format(ca=vals[0]))
                        except CalledModuleError:
                            gscript.fatal(_("v.db.update return an error"))
                if stdout:
                    outtxt += "\n"
                if x == len(myfeats):
                    break
                else:
                    x += 1
    if stdout:
        print(outtxt)
Example #52
def main(options, flags):
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    nprocs = int(options["nprocs"])
    step = options["step"]
    levels = options["levels"]
    minlevel = options["minlevel"]
    maxlevel = options["maxlevel"]
    cut = options["cut"]
    time_suffix = options["suffix"]

    register_null = flags["n"]
    t_flag = flags["t"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output,
                                 "stvds",
                                 dbif=dbif,
                                 overwrite=overwrite)

    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"

    # Configure the r.contour module
    contour_module = pymod.Module("r.contour",
                                  input="dummy",
                                  output="dummy",
                                  run_=False,
                                  finish_=False,
                                  flags=flags,
                                  overwrite=overwrite,
                                  quiet=True)

    if step:
        contour_module.inputs.step = float(step)
    if minlevel:
        contour_module.inputs.minlevel = float(minlevel)
    if maxlevel:
        contour_module.inputs.maxlevel = float(maxlevel)
    if levels:
        contour_module.inputs.levels = levels.split(",")
    if cut:
        contour_module.inputs.cut = int(cut)

    # The module queue for parallel execution, except if attribute tables should
    # be created. Then force single process use
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(
                _("The number of parallel r.contour processes was "
                  "reduced to 1 because of the attribute table "
                  "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))
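    # The contour module was created with run_=False and finish_=False so
    # that copies of it can be queued and executed asynchronously, with up
    # to nprocs processes running in parallel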

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.contour on all selected maps
    for map in maps:
        count += 1

        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)
        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="vector",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(contour_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title, descr, stype,
                                dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        try:
            if map.load() is not True:
                continue
        except FatalError:
            continue
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove",
                            flags='f',
                            type='vector',
                            name=names,
                            quiet=True)

    dbif.close()
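
# A minimal sketch of the usual entry point for a GRASS Python script like the
# one above (an assumption, since the header with the option and flag
# definitions is not part of this excerpt): gscript.parser() fills the global
# options/flags dictionaries from the declared interface before main() runs.
if __name__ == "__main__":
    options, flags = gscript.parser()
    sys.exit(main())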
Example #53
def main():
    # Get the options
    input = options["input"]
    output = options["output"]
    strds = options["strds"]
    tempwhere = options["t_where"]
    where = options["where"]
    methods = options["method"]
    percentile = options["percentile"]

    overwrite = grass.overwrite()

    quiet = True

    if grass.verbosity() > 2:
        quiet = False

    if where == "" or where == " " or where == "\n":
        where = None

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)
    # Single space time raster dataset
    if len(strds_names) == 1:
        granu = first_strds.get_granularity()
        rows = first_strds.get_registered_maps(
            "name,mapset,start_time,end_time", tempwhere, "start_time", dbif)
        if not rows:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is empty") %
                first_strds.get_id())

        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [
                row["name"] + "@" + row["mapset"],
            ]

            s = Sample(start, end, raster_maps, first_strds.get_name(), granu)
            samples.append(s)
    else:
        # Multiple space time raster datasets
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(
                    _(
                        "Temporal type of space time raster "
                        "datasets must be equal\n<%(a)s> of type "
                        "%(type_a)s do not match <%(b)s> of type "
                        "%(type_b)s" % {
                            "a": first_strds.get_id(),
                            "type_a": first_strds.get_temporal_type(),
                            "b": dataset.get_id(),
                            "type_b": dataset.get_temporal_type()
                        }))

        mapmatrizes = tgis.sample_stds_by_stds_topology(
            "strds", "strds", strds_names, strds_names[0], False, None,
            "equal", False, False)
        #TODO check granularity for multiple STRDS
        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:

                entry = mapmatrix[i]

                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)

            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]

                start, end = map.get_temporal_extent_as_tuple()
                s = Sample(start, end, mapname_list, name)
                samples.append(s)
    # Get the layer and database connections of the input vector
    if where:
        try:
            grass.run_command("v.extract",
                              input=input,
                              where=where,
                              output=output)
        except CalledModuleError:
            dbif.close()
            grass.fatal(
                _("Unable to run v.extract for vector map"
                  " <%s> and where <%s>") % (input, where))
    else:
        gcopy(input, output, 'vector')

    msgr = Messenger()
    perc_curr = 0
    perc_tot = len(samples)
    pymap = Vector(output)
    try:
        pymap.open('r')
    except Exception:
        dbif.close()
        grass.fatal(_("Unable to open vector map <%s>") % output)
    pymap.close()

    for sample in samples:
        raster_names = sample.raster_names
        # Call v.rast.stats for each raster map
        for name in raster_names:
            day = sample.printDay()
            column_name = "%s_%s" % (sample.strds_name, day)
            try:
                grass.run_command("v.rast.stats",
                                  map=output,
                                  raster=name,
                                  column=column_name,
                                  method=methods,
                                  percentile=percentile,
                                  quiet=quiet,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to run v.rast.stats for vector map"
                      " <%s> and raster map <%s>") % (output, name))

        msgr.percent(perc_curr, perc_tot, 1)
        perc_curr += 1

    dbif.close()
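
# The Sample helper class instantiated above is defined elsewhere in the
# addon; a hedged minimal sketch consistent with how it is used here
# (attribute names are taken from the calls above, the printDay() format is
# an assumption) could look like this:
class Sample(object):
    def __init__(self, start, end, raster_names, strds_name, granularity=None):
        self.start = start                # start of the temporal extent
        self.end = end                    # end of the temporal extent (may be None)
        self.raster_names = raster_names  # list of fully qualified raster names
        self.strds_name = strds_name      # name of the source STRDS
        self.granularity = granularity    # optional granularity string

    def printDay(self):
        # Compact timestamp used to build the output column name,
        # e.g. "2001_01_01" for an absolute start time (assumed format)
        return str(self.start).split(" ")[0].replace("-", "_")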
Example #54
def main():

    # Get the options
    input = options["input"]
    elevation = options["elevation"]
    expdir = options["expdir"]
    where = options["where"]
    null = options["null"]
    use_pdata = flags["p"]
    coorcorr = flags["c"]
    use_granularity = flags["g"]

    # Make sure the temporal database exists
    tgis.init()

    if not os.path.exists(expdir):
        grass.fatal(_("Export directory <%s> not found.") % expdir)

    os.chdir(expdir)

    sp = tgis.open_old_stds(input, "strds")

    if use_granularity:
        # Attention: A list of lists of maps will be returned
        maps = sp.get_registered_maps_as_objects_by_granularity()
        # Create a NULL map in case of granularity support
        null_map = "temporary_null_map_%i" % os.getpid()
        grass.mapcalc("%s = null()" % (null_map))
    else:
        maps = sp.get_registered_maps_as_objects(where, "start_time", None)

    # Copy each raster map to a single temporary name so that the VTK
    # scalar arrays all carry the same name
    map_name = "%s_%i" % (sp.base.get_name(), os.getpid())

    count = 0
    if maps is not None:
        for map in maps:
            if use_granularity:
                # A granule may be an empty list; fall back to the NULL map
                id = map[0].get_map_id() if map else None
            else:
                id = map.get_map_id()
            # None ids will be replaced by NULL maps
            if id is None:
                id = null_map

            grass.run_command("g.copy",
                              rast="%s,%s" % (id, map_name),
                              overwrite=True)
            out_name = "%6.6i_%s.vtk" % (count, sp.base.get_name())

            mflags = ""
            if use_pdata:
                mflags += "p"
            if coorcorr:
                mflags += "c"

            # Export the raster map with r.out.vtk
            if elevation:
                ret = grass.run_command("r.out.vtk",
                                        flags=mflags,
                                        null=null,
                                        input=map_name,
                                        elevation=elevation,
                                        output=out_name,
                                        overwrite=grass.overwrite())
            else:
                ret = grass.run_command("r.out.vtk",
                                        flags=mflags,
                                        null=null,
                                        input=map_name,
                                        output=out_name,
                                        overwrite=grass.overwrite())
            if ret != 0:
                grass.fatal(_("Unable to export raster map <%s>" % map_name))

            count += 1

    if use_granularity:
        grass.run_command("g.remove", flags='f', type='rast', pattern=null_map)
    grass.run_command("g.remove", flags='f', type='rast', pattern=map_name)
Example #55
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    sort = options["sort"]
    add_time = flags["t"]
    patch_s = flags["s"]
    patch_z = flags["z"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, "start_time", None)

    if rows:

        ordered_rasts = []
        # newest maps first
        if sort == 'desc':
            rows_sorted = rows[::-1]
        # oldest maps first (default)
        else:
            rows_sorted = rows

        for row in rows_sorted:
            string = "%s" % (row["id"])
            ordered_rasts.append(string)

        patch_flags = ""
        if patch_z:
            patch_flags += "z"
        if patch_s:
            patch_flags += "s"

        try:
            grass.run_command("r.patch",
                              overwrite=grass.overwrite(),
                              input=(',').join(ordered_rasts),
                              output=output,
                              flags=patch_flags)
        except CalledModuleError:
            grass.fatal(
                _("%s failed. Check above error messages.") % 'r.patch')

        if not add_time:

            # We need to set the temporal extent from the subset of selected maps
            maps = sp.get_registered_maps_as_objects(where=where,
                                                     order="start_time",
                                                     dbif=None)
            first_map = maps[0]
            last_map = maps[-1]
            start_a, end_a = first_map.get_temporal_extent_as_tuple()
            start_b, end_b = last_map.get_temporal_extent_as_tuple()

            if end_b is None:
                end_b = start_b

            if first_map.is_time_absolute():
                extent = tgis.AbsoluteTemporalExtent(start_time=start_a,
                                                     end_time=end_b)
            else:
                extent = tgis.RelativeTemporalExtent(
                    start_time=start_a,
                    end_time=end_b,
                    unit=first_map.get_relative_time_unit())

            # Create the time range for the output map
            if output.find("@") >= 0:
                id = output
            else:
                mapset = grass.gisenv()["MAPSET"]
                id = output + "@" + mapset

            map = sp.get_new_map_instance(id)
            map.load()

            map.set_temporal_extent(extent=extent)

            # Register the map in the temporal database
            if map.is_in_db():
                map.update_all()
            else:
                map.insert()
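
# Example invocation, assuming the code above belongs to a t.rast.patch-style
# module whose interface matches the options read at the top (the module name
# and the dataset/map names are assumptions):
#
#   t.rast.patch input=precip_daily output=precip_total sort=desc \
#       where="start_time >= '2010-01-01'"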
Example #56
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    name = options["input"]
    type = options["type"]
    where = options["where"]
    temporal_relations = flags["m"]
    spatio_temporal_relations = flags["s"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(name, type)

    # Get ordered map list
    maps = sp.get_registered_maps_as_objects(where=where,
                                             order="start_time",
                                             dbif=None)

    spatial = None

    if spatio_temporal_relations:
        if sp.get_type() == "strds":
            spatial = "2D"
        else:
            spatial = "3D"

    if temporal_relations or spatio_temporal_relations:
        sp.print_spatio_temporal_relationships(maps=maps, spatial=spatial)
        return

    sp.base.print_info()

    #      0123456789012345678901234567890
    print(
        " +-------------------- Temporal topology -------------------------------------+"
    )
    if where:
        print(" | Is subset of dataset: ...... True")
    else:
        print(" | Is subset of dataset: ...... False")

    check = sp.check_temporal_topology(maps)
    if check:
        #      0123456789012345678901234567890
        print(" | Temporal topology is: ...... valid")
    else:
        #      0123456789012345678901234567890
        print(" | Temporal topology is: ...... invalid")

    dict_ = sp.count_temporal_types(maps)

    for key in dict_.keys():
        if key == "interval":
            #      0123456789012345678901234567890
            print(" | Number of intervals: ....... %s" % (dict_[key]))
        if key == "point":
            print(" | Number of points: .......... %s" % (dict_[key]))
        if key == "invalid":
            print(" | Invalid time stamps: ....... %s" % (dict_[key]))

    #      0123456789012345678901234567890
    print(" | Number of gaps: ............ %i" % sp.count_gaps(maps))

    if sp.is_time_absolute():
        gran = tgis.compute_absolute_time_granularity(maps)
    else:
        gran = tgis.compute_relative_time_granularity(maps)
    print(" | Granularity: ............... %s" % str(gran))

    print(
        " +-------------------- Topological relations ---------------------------------+"
    )
    dict_ = sp.count_temporal_relations(maps)

    if dict_:
        for key in dict_.keys():
            if key == "equal":
                #      0123456789012345678901234567890
                print(" | Equal:...................... %s" % (dict_[key]))
            if key == "during":
                print(" | During: .................... %s" % (dict_[key]))
            if key == "contains":
                print(" | Contains: .................. %s" % (dict_[key]))
            if key == "overlaps":
                print(" | Overlaps: .................. %s" % (dict_[key]))
            if key == "overlapped":
                print(" | Overlapped: ................ %s" % (dict_[key]))
            if key == "after":
                print(" | After: ..................... %s" % (dict_[key]))
            if key == "before":
                print(" | Before: .................... %s" % (dict_[key]))
            if key == "starts":
                print(" | Starts: .................... %s" % (dict_[key]))
            if key == "finishes":
                print(" | Finishes: .................. %s" % (dict_[key]))
            if key == "started":
                print(" | Started: ................... %s" % (dict_[key]))
            if key == "finished":
                print(" | Finished: .................. %s" % (dict_[key]))
            if key == "follows":
                print(" | Follows: ................... %s" % (dict_[key]))
            if key == "precedes":
                print(" | Precedes: .................. %s" % (dict_[key]))
    print(
        " +----------------------------------------------------------------------------+"
    )
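
# A hedged helper sketch (not part of the original module) that collects the
# same key figures programmatically instead of printing the formatted report;
# it only re-uses calls that already appear above:
def topology_summary(sp, maps):
    import grass.temporal as tgis  # lazy import, as in main() above

    summary = {
        "topology_valid": sp.check_temporal_topology(maps),
        "temporal_types": sp.count_temporal_types(maps),
        "gaps": sp.count_gaps(maps),
        "relations": sp.count_temporal_relations(maps),
    }
    if sp.is_time_absolute():
        summary["granularity"] = tgis.compute_absolute_time_granularity(maps)
    else:
        summary["granularity"] = tgis.compute_relative_time_granularity(maps)
    return summary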
Example #57
def main():

    # Get the options
    input = options["input"]
    where = options["where"]
    columns = options["columns"]
    tempwhere = options["t_where"]
    layer = options["layer"]
    separator = grass.separator(options["separator"])

    if where == "" or where == " " or where == "\n":
        where = None

    if columns == "" or columns == " " or columns == "\n":
        columns = None

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "stvds")

    rows = sp.get_registered_maps("name,layer,mapset,start_time,end_time",
                                  tempwhere, "start_time", None)

    col_names = ""
    if rows:
        for row in rows:
            vector_name = "%s@%s" % (row["name"], row["mapset"])
            # In case a layer is defined in the vector dataset,
            # we override the option layer
            if row["layer"]:
                layer = row["layer"]

            select = grass.read_command("v.db.select",
                                        map=vector_name,
                                        layer=layer,
                                        columns=columns,
                                        separator="%s" % (separator),
                                        where=where)

            if not select:
                grass.fatal(
                    _("Unable to run v.db.select for vector map <%s> "
                      "with layer %s") % (vector_name, layer))
            # The first line contains the column names
            lines = select.split("\n")
            count = 0
            for entry in lines:
                if entry.strip() != "":
                    # print the column names in case they change
                    if count == 0:
                        col_names_new = "start_time%send_time%s%s" % (
                            separator, separator, entry)
                        if col_names != col_names_new:
                            col_names = col_names_new
                            print(col_names)
                    else:
                        if row["end_time"]:
                            print("%s%s%s%s%s" % (row["start_time"], separator,
                                                  row["end_time"], separator,
                                                  entry))
                        else:
                            print("%s%s%s%s" % (row["start_time"], separator,
                                                separator, entry))
                    count += 1
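
# Assuming the pipe separator, the listing printed above looks roughly like
# the following (attribute names and values are placeholders, not real
# output):
#
#   start_time|end_time|cat|soil_type
#   2001-01-01 00:00:00|2001-04-01 00:00:00|1|sand
#   2001-04-01 00:00:00|2001-07-01 00:00:00|2|loam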
Example #58
def main():

    # Get the options
    datasets = options["inputs"]
    file = options["file"]
    type = options["type"]
    recursive = flags["r"]
    force = flags["f"]

    if recursive and not force:
        grass.fatal(_("The recursive flag works only in conjunction with the force flag: use -rf"))

    if datasets and file:
        grass.fatal(_("%s= and %s= are mutually exclusive") % ("input", "file"))

    # Make sure the temporal database exists
    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    dataset_list = []

    # Dataset names as comma separated string
    if datasets:
        if datasets.find(",") == -1:
            dataset_list = (datasets,)
        else:
            dataset_list = tuple(datasets.split(","))

    # Read the dataset list from file
    if file:
        fd = open(file, "r")

        line = True
        while True:
            line = fd.readline()
            if not line:
                break

            line_list = line.split("\n")
            dataset_name = line_list[0]
            dataset_list.append(dataset_name)

    statement = ""

    # Create the pygrass Module object for g.remove
    remove = pyg.Module("g.remove", quiet=True, flags='f', run_=False)

    for name in dataset_list:
        name = name.strip()
        sp = tgis.open_old_stds(name, type, dbif)

        if recursive and force:
            grass.message(_("Removing registered maps and %s" % type))
            maps = sp.get_registered_maps_as_objects(dbif=dbif)
            map_statement = ""
            count = 1
            name_list = []
            for map in maps:
                map.select(dbif)
                # A single map may be registered with several layers, so avoid
                # removing the same map more than once; the database entries,
                # however, must be removed for every layer
                if map.get_name() not in name_list:
                    name_list.append(str(map.get_name()))
                map_statement += map.delete(dbif=dbif, execute=False)

                count += 1
                # Delete every 100 maps
                if count%100 == 0:
                    dbif.execute_transaction(map_statement)
                    if type == "strds":
                        remove(type="raster", name=name_list, run_=True)
                    if type == "stvds":
                        remove(type="vector", name=name_list, run_=True)
                    if type == "str3ds":
                        remove(type="raster_3d", name=name_list, run_=True)
                    map_statement = ""
                    name_list = []

            if map_statement:
                dbif.execute_transaction(map_statement)
            if name_list:
                if type == "strds":
                    remove(type="raster", name=name_list, run_=True)
                if type == "stvds":
                    remove(type="vector", name=name_list, run_=True)
                if type == "str3ds":
                    remove(type="raster_3d", name=name_list, run_=True)
        else:
            grass.message(_("Note: registered maps themselves have not been removed, only the %s" % type))

        statement += sp.delete(dbif=dbif, execute=False)

    # Execute the collected SQL statements
    dbif.execute_transaction(statement)

    dbif.close()
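
# Example invocation, assuming the code above belongs to a t.remove-style
# module whose interface matches the options read at the top (the module name
# and the dataset names are assumptions):
#
#   t.remove -rf type=strds inputs=tempmean_monthly,precip_monthly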
Example #59
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    use_raster_region = flags["r"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbors module
    neighbor_module = pymod.Module("r.neighbors",
                                   input="dummy",
                                   output="dummy",
                                   run_=False,
                                   finish_=False,
                                   size=int(size),
                                   method=method,
                                   overwrite=overwrite,
                                   quiet=True)

    gregion_module = pymod.Module(
        "g.region",
        raster="dummy",
        run_=False,
        finish_=False,
    )

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())

        if use_raster_region is True:
            reg = copy.deepcopy(gregion_module)
            reg(raster=map.get_id())
            print(reg.get_bash())
            print(mod.get_bash())
            mm = pymod.MultiModule([reg, mod],
                                   sync=False,
                                   set_temp_region=True)
            process_queue.put(mm)
        else:
            print(mod.get_bash())
            process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()
    proc_list = process_queue.get_finished_modules()

    # Check return status of all finished modules
    error = 0
    for proc in proc_list:
        if proc.popen.returncode != 0:
            grass.error(
                _("Error running module: %s\n    stderr: %s") %
                (proc.get_bash(), proc.outputs.stderr))
            error += 1

    if error > 0:
        grass.fatal(_("Error running modules."))

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
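
# A minimal, hedged sketch of the MultiModule pattern used above for the
# -r (use_raster_region) case: g.region and r.neighbors are grouped so that
# each job in the queue runs with its own temporary region (the map names,
# size and method below are placeholders):
def run_with_temp_region(in_map, out_map, nprocs=2):
    import grass.pygrass.modules as pymod  # lazy import, as in main() above

    reg = pymod.Module("g.region", raster=in_map, run_=False, finish_=False)
    mod = pymod.Module("r.neighbors", input=in_map, output=out_map,
                       size=3, method="average", run_=False, finish_=False)
    queue = pymod.ParallelModuleQueue(nprocs)
    queue.put(pymod.MultiModule([reg, mod], sync=False, set_temp_region=True))
    queue.wait()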
def main():
    # Get the options
    input = options["input"]
    timestamp_column = options["timestamp_column"]
    columns = options["column"]
    layer = options["layer"]
    where = options["where"]
    strds = options["strds"]
    tempwhere = options["t_where"]
    i_flag = flags["i"]

    if where == "" or where == " " or where == "\n":
        where = None

    # overwrite = grass.overwrite()

    # Set verbosity level
    # quiet = True
    # if grass.verbosity() > 2:
    #     quiet = False

    grass.warning(_('This addon is experimental!'))

    # Check DB connection for input vector map
    dbcon = grass.vector_layer_db(input, layer)
    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")
    column_names = columns.split(",")
    if not len(column_names) == len(strds_names):
        grass.fatal(_('Number of columns and number of STRDS do not match.'))

    # Check type of timestamp column
    cols = grass.vector_columns(input, layer=layer)
    if timestamp_column not in cols.keys():
        grass.fatal(
            _('Could not find column {} in table connected to '
              'vector map {} at layer {}'.format(timestamp_column,
                                                 input, layer)))
    if cols[timestamp_column]['type'] != 'DATE':
        if dbcon['driver'] != 'sqlite':
            # Note that SQLite does not have a DATE datatype and
            # an index does not significantly speed up the process
            # (at least not with a couple of hundred points)
            grass.warning(
                _('Timestamp column is of type {}. It is recommended '
                  'to use the DATE type with an index.'.format(
                      cols[timestamp_column]['type'])))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    # Determine the temporal extent of the vector points; it is used below to
    # restrict the raster map selection, also when a t_where clause is given
    extent = []
    for stat in ('min', 'max'):
        tsql = "SELECT {}({}) FROM {}".format(stat, timestamp_column,
                                              dbcon['table'])
        extent.append(grass.read_command('db.select', flags='c',
                                         sql=tsql).strip())

    grass.verbose(
        _('Temporal extent of vector points map is '
          '{} to {}'.format(extent[0], extent[1])))

    if tempwhere:
        tempwhere = '({}) AND '.format(tempwhere)

    # Loop over STRDS
    counter = 0
    for strds_name in strds_names:

        cur_strds = tgis.open_old_stds(strds_name, "strds", dbif)

        granu = cur_strds.get_granularity()
        start_time = tgis.datetime_math.check_datetime_string(extent[0])
        start_gran = tgis.datetime_math.adjust_datetime_to_granularity(
            start_time, granu).isoformat()
        # Restrict the selection to the temporal extent of the points,
        # on top of any user supplied t_where clause
        strds_where = tempwhere + "(end_time > '{}' and start_time <= '{}')".format(
            start_gran, extent[1])

        # Get info on registered maps in STRDS
        rows = cur_strds.get_registered_maps("name,mapset,start_time,end_time",
                                             strds_where, "start_time", dbif)

        # Check temporal type and
        # define sampling function to use
        # becomes relevant when temporal type relative gets implemented
        if cur_strds.is_time_relative():
            grass.fatal(
                _('Sorry, STRDS of relative temporal type is not (yet) supported'
                  ))
            sample = sample_relative
        else:
            sample = sample_absolute

        # Check if there are raster maps to sample from that fulfill
        # the temporal conditions
        if not rows and not tempwhere:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <{}> is empty".format(
                    cur_strds.get_id())))
        elif not rows and tempwhere:
            dbif.close()
            grass.fatal(
                _("No maps selected from space time raster dataset <{}>, "
                  "or the dataset is empty".format(cur_strds.get_id())))

        # Include temporal condition into where clause
        where_clause = '({}) AND '.format(where) if where else ''

        # Loop over registered maps in STRDS
        row_number = 0
        for row in rows:
            # If r.what had a where option, r.what could be used to
            # collect raster values (without interpolation)
            # in a ParallelModuleQueue to collect values using multiple
            # cores and then upload results in one operation

            sample(input, layer, timestamp_column, column_names[counter], row,
                   where_clause, i_flag)

            row_number += 1
            grass.percent(row_number, len(rows), 3)
        counter = counter + 1

    dbif.close()
    grass.vector_history(input)
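
# The sample_absolute() helper called in the loop above is not part of this
# excerpt; a hedged minimal sketch that matches the call signature used above
# (the v.what.rast-based behaviour, the date handling and the assumption that
# the target column already exists are all guesses, not the addon's actual
# implementation):
def sample_absolute(vector, layer, timestamp_column, column, row,
                    where_clause, interpolate):
    start = str(row['start_time'])
    raster = "{}@{}".format(row['name'], row['mapset'])
    # Select only points whose timestamp falls into the temporal extent
    # of the current raster map
    if row['end_time']:
        where = "{}{} >= '{}' AND {} < '{}'".format(
            where_clause, timestamp_column, start,
            timestamp_column, str(row['end_time']))
    else:
        where = "{}{} = '{}'".format(where_clause, timestamp_column, start)
    flags = 'i' if interpolate else ''
    grass.run_command('v.what.rast', map=vector, layer=layer, raster=raster,
                      column=column, where=where, flags=flags, quiet=True)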