Example #1
 def test_name(self):
     """Check that an opened raster reports consistent name and fullname."""
     raster = RasterRow(self.name)
     raster.open(mode='r')
     self.assertEqual(raster.name, self.name)
     expected = "{0}@{1}".format(raster.name, raster.mapset)
     self.assertEqual(raster.fullname(), expected)
     raster.close()
Example #2
 def test_name(self):
     """Open the raster read-only and verify its name and fullname."""
     rmap = RasterRow(self.name)
     rmap.open(mode='r')
     self.assertEqual(rmap.name, self.name)
     wanted = "{0}@{1}".format(rmap.name, rmap.mapset)
     self.assertEqual(rmap.fullname(), wanted)
     rmap.close()
Example #3
def main():
    """Run a user defined function (UDF) over one or more space time raster
    datasets (STRDS) and store the result in a new output STRDS.

    Configuration comes from the module-level ``options`` dictionary:
    comma separated input STRDS names, an output STRDS name, a basename for
    the generated raster maps, an optional WHERE clause, the path of the
    python file holding the UDF code, and the number of rows processed per
    UDF invocation.

    The inputs are read in horizontal stripes of ``nrows`` rows; each stripe
    is converted to datacubes, passed through the UDF, and the resulting rows
    are written into the open output raster maps.  The output maps are then
    time stamped, stored in the temporal database and registered in a newly
    created STRDS.
    """
    # Get the options
    inputs = options["inputs"]
    output = options["output"]
    basename = options["basename"]
    where = options["where"]
    pyfile = options["pyfile"]
    nrows = int(options["nrows"])

    # Validate early, before any connections are opened (the original
    # repeated this check inside the per-input loop).
    if nrows == 0:
        gcore.fatal(_("Number of rows for the udf must be greater 0."))

    input_name_list = inputs.split(",")

    input_strds: List[StrdsEntry] = []

    # Import the python code into the current function context
    # (use a context manager so the file handle is always closed)
    with open(pyfile, "r") as code_file:
        code = code_file.read()
    projection_kv = gcore.parse_command("g.proj", flags="g")
    epsg_code = projection_kv["epsg"]

    tgis.init()
    mapset = gcore.gisenv()["MAPSET"]

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    region = Region()
    num_input_maps = 0
    open_output_maps = []

    for input_name in input_name_list:
        sp = tgis.open_old_stds(input_name, "strds", dbif)
        map_list = sp.get_registered_maps_as_objects(where=where,
                                                     order="start_time",
                                                     dbif=dbif)

        if not map_list:
            dbif.close()
            # Fixed: the original formatted with the builtin ``input``
            # instead of the offending dataset name.
            gcore.fatal(_("Space time raster dataset <%s> is empty")
                        % input_name)

        num_input_maps = len(map_list)
        input_strds.append(
            StrdsEntry(dbif=dbif, strds=sp, map_list=map_list, region=region))

    # All input STRDS must provide the same number of maps
    for strds in input_strds:
        if len(strds.map_list) != num_input_maps:
            dbif.close()
            gcore.fatal(
                _("The number of maps in the input STRDS must be equal"))

    # Setup the input strds to compute the output maps and the resulting strds
    mtype = None
    for strds in input_strds:
        strds.setup()
        mtype = strds.mtype

    num_output_maps = count_resulting_maps(input_strds=input_strds,
                                           code=code,
                                           epsg_code=epsg_code)

    if num_output_maps == 1:
        output_map = RasterRow(name=basename)
        output_map.open(mode="w", mtype=mtype, overwrite=gcore.overwrite())
        open_output_maps.append(output_map)
    elif num_output_maps > 1:
        for index in range(num_output_maps):
            output_map = RasterRow(name=basename + f"_{index}", mapset=mapset)
            output_map.open(mode="w", mtype=mtype, overwrite=gcore.overwrite())
            open_output_maps.append(output_map)
    else:
        dbif.close()
        # Fixed: the original appended ``% input``, which raised a TypeError
        # because the message contains no format placeholder.
        gcore.fatal(_("No result generated"))

    def write_rows(target_map, rows):
        """Write every row of a 2D array into *target_map* as a Buffer row."""
        for row in rows:
            b = Buffer(shape=(region.cols, ), mtype=mtype)
            b[:] = row[:]
            target_map.put_row(b)

    # Workaround because time reduction will remove the timestamp
    result_start_times = [datetime.now()]
    first = False

    # Read several rows for each map of each input strds and load them
    # into the udf
    for index in range(0, region.rows, nrows):
        # Clamp the stripe height at the lower region border.  The original
        # computed ``index + nrows - region.rows + 1`` for the last stripe,
        # which over-counts the remaining rows and could read past the
        # region; the remaining row count is ``region.rows - index``.
        usable_rows = min(nrows, region.rows - index)

        # Read all input strds as cubes
        datacubes = []
        for strds in input_strds:
            datacube = strds.to_datacube(index=index, usable_rows=usable_rows)
            datacubes.append(datacube)

        # Run the UDF code
        data = run_udf(code=code, epsg_code=epsg_code, datacube_list=datacubes)

        # Read only the first cube
        datacubes = data.get_datacube_list()
        first_cube_array: xarray.DataArray = datacubes[0].get_array()

        # Remember the start times reported by the first stripe, if present
        if not first:
            if 't' in first_cube_array.coords:
                result_start_times = first_cube_array.coords['t']

        if first_cube_array.ndim == 3:
            # Three dimensions: one slice per open output map
            for count, band_slice in enumerate(first_cube_array):
                write_rows(open_output_maps[count], band_slice)
        elif first_cube_array.ndim == 2:
            # Two dimensions: a single output map
            write_rows(open_output_maps[0], first_cube_array)

        first = True

    # Create new STRDS
    new_sp = open_new_stds(
        name=output,
        type="strds",
        temporaltype=input_strds[0].strds.get_temporal_type(),
        title="new STRDS",
        descr="New STRDS from UDF",
        semantic="UDF",
        overwrite=gcore.overwrite(),
        dbif=dbif)

    maps_to_register = []
    for count, output_map in enumerate(open_output_maps):
        output_map.close()
        print(output_map.fullname())
        rd = RasterDataset(output_map.fullname())
        # ``result_start_times`` is either an xarray coordinate (exposing a
        # ``data`` attribute) or the fallback datetime list created above.
        if input_strds[0].strds.is_time_absolute():
            if hasattr(result_start_times, "data"):
                d = pandas.to_datetime(result_start_times.data[count])
            else:
                d = result_start_times[count]
            rd.set_absolute_time(start_time=d)
        elif input_strds[0].strds.is_time_relative():
            if hasattr(result_start_times, "data"):
                d = result_start_times.data[count]
            else:
                d = result_start_times[count]
            rd.set_relative_time(start_time=d, end_time=None, unit="seconds")
        rd.load()
        if rd.is_in_db(dbif=dbif):
            rd.update(dbif=dbif)
        else:
            rd.insert(dbif=dbif)
        maps_to_register.append(rd)
        rd.print_info()

    register_map_object_list(type="raster",
                             map_list=maps_to_register,
                             output_stds=new_sp,
                             dbif=dbif)

    dbif.close()