Example 1
0
    def test_absolute_time_2(self):
        """Register vector maps in the temporal database and in addition
        in a stvds using the object method deleting empty maps

        :return:
        """
        tgis.register_maps_in_space_time_dataset(
            type="vector",
            name=None,
            maps="register_map_1,register_map_2,register_map_empty",
            start="2001-01-01",
            increment="1 day",
            interval=True,
        )

        mapset = tgis.get_current_mapset()

        # Each registered map is expected to span exactly one day,
        # back to back, starting at 2001-01-01.
        expected = [
            ("register_map_1",
             datetime.datetime(2001, 1, 1), datetime.datetime(2001, 1, 2)),
            ("register_map_2",
             datetime.datetime(2001, 1, 2), datetime.datetime(2001, 1, 3)),
            ("register_map_empty",
             datetime.datetime(2001, 1, 3), datetime.datetime(2001, 1, 4)),
        ]

        registered = []
        for map_name, expected_start, expected_end in expected:
            vector_map = tgis.VectorDataset(map_name + "@" + mapset)
            vector_map.select()
            start, end = vector_map.get_absolute_time()
            self.assertEqual(start, expected_start)
            self.assertEqual(end, expected_end)
            registered.append(vector_map)

        tgis.register_map_object_list(
            type="vector",
            map_list=registered,
            output_stds=self.stvds_abs,
            delete_empty=True,
        )

        # The stvds should only cover the two non-empty maps.
        self.stvds_abs.select()
        start, end = self.stvds_abs.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 3))

        # The empty map must have been deleted from the mapset.
        empty_map = tgis.VectorDataset("register_map_empty@" + mapset)
        self.assertEqual(empty_map.map_exists(), False)
Example 2
0
    def test_absolute_time_2(self):
        """Test the registration of maps with absolute time
        using register_maps_in_space_time_dataset() and
        register_map_object_list() with empty map deletion.
        """
        # Register three raster maps as point-in-time snapshots
        # (interval=False), 8 hours apart, starting 2001-01-01 10:30:01.
        tgis.register_maps_in_space_time_dataset(
            type="raster",
            name=None,
            maps="register_map_1,register_map_2,register_map_null",
            start="2001-01-01 10:30:01",
            increment="8 hours",
            interval=False,
        )

        map_1 = tgis.RasterDataset("register_map_1@" +
                                   tgis.get_current_mapset())
        map_1.select()
        start, end = map_1.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1, 10, 30, 1))

        map_2 = tgis.RasterDataset("register_map_2@" +
                                   tgis.get_current_mapset())
        map_2.select()
        start, end = map_2.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1, 18, 30, 1))

        map_3 = tgis.RasterDataset("register_map_null@" +
                                   tgis.get_current_mapset())
        map_3.select()
        start, end = map_3.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 2, 2, 30, 1))

        map_list = [map_1, map_2, map_3]

        tgis.register_map_object_list(
            type="raster",
            map_list=map_list,
            output_stds=self.strds_abs,
            delete_empty=True,
        )
        self.strds_abs.select()
        start, end = self.strds_abs.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1, 10, 30, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 1, 18, 30, 1))

        # BUG FIX: register_map_null is a raster map, so the existence
        # check must use RasterDataset — the original used VectorDataset,
        # which would query the wrong map type.
        map_3 = tgis.RasterDataset("register_map_null@" +
                                   tgis.get_current_mapset())
        self.assertEqual(map_3.map_exists(), False)
Example 3
0
    def test_absolute_time_1(self):
        """Test the registration of maps with absolute time
        using register_maps_in_space_time_dataset() and register_map_object_list()
        """
        tgis.register_maps_in_space_time_dataset(
            type="raster",
            name=None,
            maps="register_map_1,register_map_2",
            start="2001-01-01",
            increment="1 day",
            interval=True,
        )

        mapset = tgis.get_current_mapset()

        # Every registered map is expected to span exactly one day,
        # back to back, starting at 2001-01-01.
        registered = []
        day = datetime.datetime(2001, 1, 1)
        for map_name in ("register_map_1", "register_map_2"):
            raster_map = tgis.RasterDataset(map_name + "@" + mapset)
            raster_map.select()
            start, end = raster_map.get_absolute_time()
            self.assertEqual(start, day)
            day = day + datetime.timedelta(days=1)
            self.assertEqual(end, day)
            registered.append(raster_map)

        tgis.register_map_object_list(
            type="raster",
            map_list=registered,
            output_stds=self.strds_abs,
            delete_empty=False,
        )

        # The strds must cover both maps' combined interval.
        self.strds_abs.select()
        start, end = self.strds_abs.get_absolute_time()
        self.assertEqual(start, datetime.datetime(2001, 1, 1))
        self.assertEqual(end, datetime.datetime(2001, 1, 3))
Example 4
0
def main():
    """Aggregate a space time raster dataset by a fixed granularity.

    Builds a list of synthetic granule maps covering the input strds at
    the requested granularity, aggregates the registered maps into them,
    and registers the results in a new output strds.
    """
    # Get the options.  Renamed "input" locally to avoid shadowing the
    # builtin input().
    input_name = options["input"]
    output = options["output"]
    where = options["where"]
    gran = options["granularity"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    time_suffix = flags["s"]

    topo_list = sampling.split(",")

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input_name, "strds", dbif)

    map_list = sp.get_registered_maps_as_objects(where=where,
                                                 order="start_time",
                                                 dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input_name)

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    start_time = map_list[0].temporal_extent.get_start_time()

    if sp.is_time_absolute():
        start_time = tgis.adjust_datetime_to_granularity(start_time, gran)

    # We use the end time first
    end_time = map_list[-1].temporal_extent.get_end_time()
    has_end_time = True

    # In case no end time is available, then we use the start time of
    # the last map layer
    if end_time is None:
        end_time = map_list[-1].temporal_extent.get_start_time()
        has_end_time = False

    # Build the granularity list: one empty granule map per time step
    granularity_list = []
    while True:
        if has_end_time is True:
            if start_time >= end_time:
                break
        else:
            if start_time > end_time:
                break

        granule = tgis.RasterDataset(None)
        start = start_time
        if sp.is_time_absolute():
            end = tgis.increment_datetime_by_string(start_time, gran)
            granule.set_absolute_time(start, end)
        else:
            end = start_time + int(gran)
            granule.set_relative_time(start, end, sp.get_relative_time_unit())
        start_time = end

        granularity_list.append(granule)

    output_list = tgis.aggregate_by_topology(granularity_list=granularity_list,
                                             granularity=gran,
                                             map_list=map_list,
                                             topo_list=topo_list,
                                             basename=base,
                                             time_suffix=time_suffix,
                                             offset=offset,
                                             method=method,
                                             nprocs=nprocs,
                                             spatial=None,
                                             overwrite=gcore.overwrite())

    if output_list:
        temporal_type, semantic_type, title, description = sp.get_initial_values()
        output_strds = tgis.open_new_stds(output, "strds", temporal_type,
                                          title, description, semantic_type,
                                          dbif, gcore.overwrite())
        tgis.register_map_object_list("rast", output_list, output_strds,
                                      register_null,
                                      sp.get_relative_time_unit(), dbif)

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
Example 5
0
def main():
    """Aggregate a space time raster dataset using the temporal topology
    of a sampling space time dataset.

    The granularity list is taken from the registered maps of the
    sampling dataset; the aggregated results are registered in a new
    output strds.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options.  Renamed "input" and "type" locally to avoid
    # shadowing the builtins input() and type().
    input_name = options["input"]
    output = options["output"]
    sampler = options["sample"]
    where = options["where"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]
    sampler_type = options["type"]

    topo_list = sampling.split(",")

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input_name, "strds", dbif)
    sampler_sp = tgis.open_old_stds(sampler, sampler_type, dbif)

    if sampler_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        gcore.fatal(
            _("Input and aggregation dataset must have "
              "the same temporal type"))

    # Check if intervals are present
    if sampler_sp.temporal_extent.get_map_time() != "interval":
        dbif.close()
        gcore.fatal(
            _("All registered maps of the aggregation dataset "
              "must have time intervals"))

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    map_list = sp.get_registered_maps_as_objects(where=where,
                                                 order="start_time",
                                                 dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input_name)

    # The sampler's registered maps define the aggregation granules
    granularity_list = sampler_sp.get_registered_maps_as_objects(
        where=where, order="start_time", dbif=dbif)

    if not granularity_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % sampler)

    gran = sampler_sp.get_granularity()

    output_list = tgis.aggregate_by_topology(
        granularity_list=granularity_list,
        granularity=gran,
        map_list=map_list,
        topo_list=topo_list,
        basename=base,
        time_suffix=time_suffix,
        offset=offset,
        method=method,
        nprocs=nprocs,
        spatial=None,
        overwrite=gcore.overwrite(),
    )

    if output_list:
        temporal_type, semantic_type, title, description = sp.get_initial_values(
        )
        output_strds = tgis.open_new_stds(
            output,
            "strds",
            temporal_type,
            title,
            description,
            semantic_type,
            dbif,
            gcore.overwrite(),
        )
        tgis.register_map_object_list(
            "rast",
            output_list,
            output_strds,
            register_null,
            sp.get_relative_time_unit(),
            dbif,
        )

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
Example 6
0
def main():
    """Aggregate a space time raster dataset using the temporal topology
    of a sampling space time dataset.

    The granularity list is taken from the registered maps of the
    sampling dataset; the aggregated results are registered in a new
    output strds.
    """
    # Get the options.  Renamed "input" and "type" locally to avoid
    # shadowing the builtins input() and type().
    input_name = options["input"]
    output = options["output"]
    sampler = options["sample"]
    where = options["where"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    time_suffix = flags["s"]
    sampler_type = options["type"]

    topo_list = sampling.split(",")

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input_name, "strds", dbif)
    sampler_sp = tgis.open_old_stds(sampler, sampler_type, dbif)

    if sampler_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        gcore.fatal(_("Input and aggregation dataset must have "
                      "the same temporal type"))

    # Check if intervals are present
    if sampler_sp.temporal_extent.get_map_time() != "interval":
        dbif.close()
        gcore.fatal(_("All registered maps of the aggregation dataset "
                      "must have time intervals"))

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    map_list = sp.get_registered_maps_as_objects(where=where,
                                                 order="start_time",
                                                 dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input_name)

    # The sampler's registered maps define the aggregation granules
    granularity_list = sampler_sp.get_registered_maps_as_objects(
        where=where, order="start_time", dbif=dbif)

    if not granularity_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % sampler)

    gran = sampler_sp.get_granularity()

    output_list = tgis.aggregate_by_topology(granularity_list=granularity_list,
                                             granularity=gran,
                                             map_list=map_list,
                                             topo_list=topo_list,
                                             basename=base,
                                             time_suffix=time_suffix,
                                             offset=offset,
                                             method=method,
                                             nprocs=nprocs,
                                             spatial=None,
                                             overwrite=gcore.overwrite())

    if output_list:
        temporal_type, semantic_type, title, description = sp.get_initial_values()
        output_strds = tgis.open_new_stds(output, "strds", temporal_type,
                                          title, description, semantic_type,
                                          dbif, gcore.overwrite())
        tgis.register_map_object_list("rast", output_list, output_strds,
                                      register_null,
                                      sp.get_relative_time_unit(), dbif)

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
Example 7
0
def main():
    """Apply a user defined function (UDF) to one or more space time
    raster datasets and register the resulting maps in a new strds.

    The input strds are read row-chunk by row-chunk into datacubes,
    passed through the UDF code loaded from *pyfile*, and the resulting
    slices are written to new raster maps.
    """
    # Get the options
    inputs = options["inputs"]
    output = options["output"]
    basename = options["basename"]
    where = options["where"]
    pyfile = options["pyfile"]
    nrows = int(options["nrows"])

    input_name_list = inputs.split(",")

    input_strds: List[StrdsEntry] = []

    # Import the python code into the current function context.
    # Use a context manager so the file handle is always closed.
    with open(pyfile, "r") as code_file:
        code = code_file.read()
    projection_kv = gcore.parse_command("g.proj", flags="g")
    epsg_code = projection_kv["epsg"]

    tgis.init()
    mapset = gcore.gisenv()["MAPSET"]

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    region = Region()
    num_input_maps = 0
    open_output_maps = []

    for input_name in input_name_list:
        sp = tgis.open_old_stds(input_name, "strds", dbif)
        map_list = sp.get_registered_maps_as_objects(where=where,
                                                     order="start_time",
                                                     dbif=dbif)

        if not map_list:
            dbif.close()
            # BUG FIX: the original formatted the builtin input() into the
            # message; report the dataset name that is actually empty.
            gcore.fatal(_("Space time raster dataset <%s> is empty")
                        % input_name)

        if nrows == 0:
            dbif.close()
            gcore.fatal(_("Number of rows for the udf must be greater 0."))

        num_input_maps = len(map_list)
        input_strds.append(
            StrdsEntry(dbif=dbif, strds=sp, map_list=map_list, region=region))

    # All input strds must provide the same number of maps
    for strds in input_strds:
        if len(strds.map_list) != num_input_maps:
            dbif.close()
            gcore.fatal(
                _("The number of maps in the input STRDS must be equal"))

    # Setup the input strds to compute the output maps and the resulting strds
    mtype = None
    for strds in input_strds:
        strds.setup()
        mtype = strds.mtype

    num_output_maps = count_resulting_maps(input_strds=input_strds,
                                           code=code,
                                           epsg_code=epsg_code)

    if num_output_maps == 1:
        output_map = RasterRow(name=basename)
        output_map.open(mode="w", mtype=mtype, overwrite=gcore.overwrite())
        open_output_maps.append(output_map)
    elif num_output_maps > 1:
        for index in range(num_output_maps):
            output_map = RasterRow(name=basename + f"_{index}", mapset=mapset)
            output_map.open(mode="w", mtype=mtype, overwrite=gcore.overwrite())
            open_output_maps.append(output_map)
    else:
        dbif.close()
        # BUG FIX: the original applied "% input" to a format string with
        # no placeholder, which raises a TypeError instead of the message.
        gcore.fatal(_("No result generated"))

    # Workaround because time reduction will remove the timestamp
    result_start_times = [datetime.now()]
    first = False

    # Read several rows for each map of each input strds and load them
    # into the udf
    for index in range(0, region.rows, nrows):
        # BUG FIX: the last chunk has region.rows - index rows left; the
        # original formula (index + nrows - region.rows + 1) over-counted.
        usable_rows = min(nrows, region.rows - index)

        # Read all input strds as cubes
        datacubes = []
        for strds in input_strds:
            datacube = strds.to_datacube(index=index, usable_rows=usable_rows)
            datacubes.append(datacube)

        # Run the UDF code
        data = run_udf(code=code, epsg_code=epsg_code, datacube_list=datacubes)

        # Read only the first cube
        datacubes = data.get_datacube_list()
        first_cube_array: xarray.DataArray = datacubes[0].get_array()

        # Capture the time coordinates from the first chunk only
        if first is False:
            if 't' in first_cube_array.coords:
                result_start_times = first_cube_array.coords['t']

        # Three dimensions: one 2D slice per output map
        if first_cube_array.ndim == 3:
            for count, cube_slice in enumerate(first_cube_array):
                output_map = open_output_maps[count]
                for row in cube_slice:
                    # Write the result into the output raster map
                    b = Buffer(shape=(region.cols, ), mtype=mtype)
                    b[:] = row[:]
                    output_map.put_row(b)
        # Two dimensions: a single output map
        elif first_cube_array.ndim == 2:
            output_map = open_output_maps[0]
            for row in first_cube_array:
                # Write the result into the output raster map
                b = Buffer(shape=(region.cols, ), mtype=mtype)
                b[:] = row[:]
                output_map.put_row(b)

        first = True

    # Create new STRDS
    new_sp = open_new_stds(
        name=output,
        type="strds",
        temporaltype=input_strds[0].strds.get_temporal_type(),
        title="new STRDS",
        descr="New STRDS from UDF",
        semantic="UDF",
        overwrite=gcore.overwrite(),
        dbif=dbif)

    maps_to_register = []
    for count, output_map in enumerate(open_output_maps):
        output_map.close()
        print(output_map.fullname())
        rd = RasterDataset(output_map.fullname())
        if input_strds[0].strds.is_time_absolute():
            if hasattr(result_start_times, "data"):
                d = pandas.to_datetime(result_start_times.data[count])
            else:
                d = result_start_times[count]
            rd.set_absolute_time(start_time=d)
        elif input_strds[0].strds.is_time_relative():
            if hasattr(result_start_times, "data"):
                d = result_start_times.data[count]
            else:
                d = result_start_times[count]
            rd.set_relative_time(start_time=d, end_time=None, unit="seconds")
        rd.load()
        if rd.is_in_db(dbif=dbif):
            rd.update(dbif=dbif)
        else:
            rd.insert(dbif=dbif)
        maps_to_register.append(rd)
        rd.print_info()

    register_map_object_list(type="raster",
                             map_list=maps_to_register,
                             output_stds=new_sp,
                             dbif=dbif)

    dbif.close()