def test_async_persistent_processing(self):

        """Submit a minimal g.region process chain into a new persistent mapset
        and poll the resulting resource twice."""
        interface = GRaaSInterface(self.gconf)
        chain = {
            "version": "1",
            "list": [{"id": "g_region_1",
                      "module": "g.region",
                      "flags": "g"}]}

        status, resource_id = interface.async_persistent_processing(
            location="LL",
            mapset="new_user_mapset",
            process_chain=chain)
        print(status)
        print(resource_id)
        self.assertEqual(status, 200)

        # Poll the resource info twice, waiting two seconds before the
        # second poll so the asynchronous job can make progress.
        for attempt in range(2):
            if attempt:
                time.sleep(2)
            status, info = interface.resource_info(resource_id)
            print(status)
            print(info)
    def test_list_strds(self):
        """The ECAD PERMANENT mapset is expected to expose exactly two strds layers."""
        interface = GRaaSInterface(self.gconf)
        status, strds_layers = interface.list_strds(location="ECAD",
                                                    mapset="PERMANENT")
        pprint(strds_layers)
        self.assertEqual(status, 200)
        self.assertEqual(2, len(strds_layers))
    def test_mapset_info(self):
        """Mapset info for ECAD/PERMANENT must contain region and projection data."""
        interface = GRaaSInterface(self.gconf)
        status, mapset_info = interface.mapset_info(location="ECAD",
                                                    mapset="PERMANENT")
        pprint(mapset_info)
        self.assertEqual(status, 200)
        for key in ("region", "projection"):
            self.assertIn(key, mapset_info)
    def test_strds_info(self):
        """Layer info for a strds must carry the full set of temporal metadata keys."""
        interface = GRaaSInterface(self.gconf)
        status, strds_info = interface.layer_info(
            layer_name="ECAD.PERMANENT.strds.precipitation_1950_2013_yearly_mm")
        pprint(strds_info)
        self.assertEqual(status, 200)

        expected_keys = ("temporal_type", "aggregation_type", "creation_time",
                         "creator", "granularity", "modification_time",
                         "number_of_maps")
        for key in expected_keys:
            self.assertIn(key, strds_info)
    def test_mapset_creation_deletion(self):
        """Create a mapset as the configured user and then delete it again."""
        config = self.gconf
        config.USER = "******"
        interface = GRaaSInterface(config)

        status, resource_id = interface.create_mapset(location="LL",
                                                      mapset="new_mapset")
        print(status)
        self.assertEqual(status, 200)
        print(resource_id)

        status, resource_id = interface.delete_mapset(location="LL",
                                                      mapset="new_mapset")
        print(status)
        self.assertEqual(status, 200)
        print(resource_id)
Ejemplo n.º 6
0
def get_process_list(args):
    """Analyse the process description and return the GRaaS process chain and the
    names of the processing result layers, one single raster layer per input.

    :param args: The process description; must contain a "python_file_url"
                 entry that points to the Python file defining the UDF
    :return: (output_names, process_list) where output_names is the list of
             result layer names and process_list the extended process chain
    :raises Exception: if "python_file_url" is missing from the description
    """

    # Get the input description and the process chain to attach this process
    input_names, process_list = process_definitions.analyse_process_graph(args)
    output_names = []

    for input_name in input_names:

        # Derive the result layer name from the plain layer name of the input.
        location, mapset, datatype, layer_name = GRaaSInterface.layer_def_to_components(
            input_name)
        output_name = "%s_%s" % (layer_name, PROCESS_NAME)
        output_names.append(output_name)

        if "python_file_url" in args:
            python_file_url = args["python_file_url"]
        else:
            # BUGFIX: error message previously read "Python fle".
            raise Exception("Python file is missing in the process description")

        pc = create_graas_process_chain_entry(input_name=input_name,
                                              python_file_url=python_file_url,
                                              output_name=output_name)
        process_list.append(pc)

    return output_names, process_list
def create_graas_process_chain_entry(input_name):
    """Build the GRaaS exporter entry that exports a raster layer as GTiff.

    :param input_name: The name of the raster layer
    :return: A GRaaS process chain description (a list with a single exporter)
    """

    location, mapset, datatype, layer_name = GRaaSInterface.layer_def_to_components(
        input_name)
    # Qualify the layer with its mapset when one is given.
    qualified_name = layer_name if mapset is None else layer_name + "@" + mapset

    suffix = randint(0, 1000000)

    exporter = {
        "id": "exporter_%i" % suffix,
        "module": "exporter",
        "outputs": [{
            "export": {"type": "raster", "format": "GTiff"},
            "param": "map",
            "value": qualified_name,
        }],
    }

    return [exporter]
def get_process_list(args):
    """Analyse the process description and return the GRaaS process chain and the
    names of the processing result strds that were filtered by start and end date.

    :param args: The process description
    :return: (output_names, process_list)
    """

    # Resolve the inputs and the process chain built so far by upstream processes.
    input_names, process_list = process_definitions.analyse_process_graph(args)
    output_names = []

    # Optional temporal bounds; a missing key leaves the bound as None.
    start_time = args.get("from")
    end_time = args.get("to")

    for input_name in input_names:
        location, mapset, datatype, layer_name = GRaaSInterface.layer_def_to_components(
            input_name)
        output_name = "%s_%s" % (layer_name, PROCESS_NAME)
        output_names.append(output_name)

        pc = create_graas_process_chain_entry(input_name=input_name,
                                              start_time=start_time,
                                              end_time=end_time,
                                              output_name=output_name)
        process_list.append(pc)

    return output_names, process_list
def create_graas_process_chain_entry(nir_time_series, red_time_series, output_time_series):
    """Build the GRaaS process chain that computes an NDVI time series with
    t.rast.mapcalc and applies the "ndvi" color table to the result.

    :param nir_time_series: The NIR band time series name
    :param red_time_series: The RED band time series name
    :param output_time_series: The name of the output time series
    :return: A list of GRaaS process chain descriptions
    """

    def qualify(layer_definition):
        # Reduce a full layer definition to "name@mapset" (or just "name").
        location, mapset, datatype, layer_name = \
            GRaaSInterface.layer_def_to_components(layer_definition)
        if mapset is None:
            return layer_name
        return layer_name + "@" + mapset

    nir_name = qualify(nir_time_series)
    red_name = qualify(red_time_series)
    location, mapset, datatype, output_name = \
        GRaaSInterface.layer_def_to_components(output_time_series)

    suffix = randint(0, 1000000)
    substitutions = {"result": output_name,
                     "nir": nir_name,
                     "red": red_name}

    mapcalc = {
        "id": "t_rast_mapcalc_%i" % suffix,
        "module": "t.rast.mapcalc",
        "inputs": [
            {"param": "expression",
             "value": "%(result)s = float((%(nir)s - %(red)s)/"
                      "(%(nir)s + %(red)s))" % substitutions},
            {"param": "inputs",
             "value": "%(nir)s,%(red)s" % substitutions},
            {"param": "basename", "value": "ndvi"},
            {"param": "output", "value": output_name},
        ],
    }

    colors = {
        "id": "t_rast_color_%i" % suffix,
        "module": "t.rast.colors",
        "inputs": [
            {"param": "input", "value": output_name},
            {"param": "color", "value": "ndvi"},
        ],
    }

    return [mapcalc, colors]
def create_graas_process_chain_entry(input_name, start_time, end_time,
                                     output_name):
    """Build the GRaaS t.rast.extract entry that subsets a strds by time range.

    :param input_name: The name of the strds to extract from
    :param start_time: Lower bound used in the where clause (start_time >= ...)
    :param end_time: Upper bound used in the where clause (end_time <= ...)
    :param output_name: The name of the resulting strds
    :return: A GRaaS process chain description
    """
    location, mapset, datatype, layer_name = GRaaSInterface.layer_def_to_components(
        input_name)
    # Qualify the layer with its mapset when one is given.
    qualified_name = layer_name if mapset is None else layer_name + "@" + mapset
    base_name = "%s_extract" % layer_name

    suffix = randint(0, 1000000)

    where_clause = "start_time >= '%(start)s' " \
                   "AND end_time <= '%(end)s'" % {"start": start_time,
                                                  "end": end_time}

    return {
        "id": "t_rast_extract_%i" % suffix,
        "module": "t.rast.extract",
        "inputs": [
            {"param": "input", "value": qualified_name},
            {"param": "where", "value": where_clause},
            {"param": "output", "value": output_name},
            {"param": "expression", "value": "1.0 * %s" % qualified_name},
            {"param": "basename", "value": base_name},
            {"param": "suffix", "value": "num"},
        ],
    }
def get_process_list(args):
    """Analyse the process description and return the GRaaS process chain and the
    names of the processing result layers, one single raster layer per input.

    :param args: The process description arguments
    :return: (output_names, process_list)
    """
    input_names, process_list = process_definitions.analyse_process_graph(args)
    output_names = []

    for input_name in input_names:
        # Only the plain layer name is needed to build the result name.
        components = GRaaSInterface.layer_def_to_components(input_name)
        layer_name = components[3]
        output_name = "%s_%s" % (layer_name, PROCESS_NAME)
        output_names.append(output_name)
        process_list.append(
            create_graas_process_chain_entry(input_name, output_name))

    return output_names, process_list
def get_process_list(args):
    """Analyse the process description and return the GRaaS process chain and the
    names of the NDVI result time series.

    The inputs must come in (NIR, RED) pairs; the "nir" and "red" entries of the
    process description identify which series of each pair is which band.

    :param args: The process description arguments
    :return: (output_names, process_list)
    :raises Exception: if the inputs do not form pairs or band info is missing
    """

    input_names, process_list = process_definitions.analyse_process_graph(args)
    output_names = []

    # The inputs must form (NIR, RED) pairs, hence an even count of at least two.
    # BUGFIX: the previous check demanded exactly two inputs, contradicting both
    # the error message and the pair-wise loop below.
    if len(input_names) < 2 or len(input_names) % 2 != 0:
        raise Exception("At least two input time series are required")

    for i in range(0, len(input_names), 2):

        input_tuple = (input_names[i], input_names[i + 1])

        nir_time_series = None
        red_time_series = None

        # Match each series of the pair against the declared band identifiers.
        for input_name in input_tuple:
            if "nir" in args and args["nir"] in input_name:
                nir_time_series = input_name
            if "red" in args and args["red"] in input_name:
                red_time_series = input_name

        if nir_time_series is None or red_time_series is None:
            raise Exception("Band information is missing from process description")

        # The result name is derived from the NIR series layer name.
        location, mapset, datatype, layer_name = GRaaSInterface.layer_def_to_components(nir_time_series)
        output_name = "%s_%s" % (layer_name, PROCESS_NAME)
        output_names.append(output_name)

        pc = create_graas_process_chain_entry(nir_time_series, red_time_series, output_name)
        process_list.extend(pc)

    return output_names, process_list
Ejemplo n.º 13
0
def create_graas_process_chain_entry(input_name, python_file_url, output_name):
    """Build the GRaaS t.rast.aggr_func entry that applies a Python UDF to a strds.

    :param input_name: The name of the input strds
    :param python_file_url: The URL of the Python file that defines the UDF
    :param output_name: The name of the output raster layer
    :return: A GRaaS process chain description
    """

    location, mapset, datatype, layer_name = GRaaSInterface.layer_def_to_components(
        input_name)
    # Qualify the layer with its mapset when one is given.
    qualified_name = layer_name if mapset is None else layer_name + "@" + mapset

    return {
        "id": "t_rast_aggr_func",
        "module": "t.rast.aggr_func",
        "inputs": [
            {"import_descr": {"source": python_file_url, "type": "file"},
             "param": "pyfile",
             "value": "$file::my_py_func"},
            {"param": "input", "value": qualified_name},
            {"param": "output", "value": output_name},
        ],
    }
def create_graas_process_chain_entry(input_name, output_name):
    """Build the GRaaS t.rast.series entry that computes the minimum value
    of a time series.

    :param input_name: The input time series name
    :param output_name: The name of the output map
    :return: A GRaaS process chain description
    """

    location, mapset, datatype, layer_name = GRaaSInterface.layer_def_to_components(
        input_name)
    # Qualify the layer with its mapset when one is given.
    qualified_name = layer_name if mapset is None else layer_name + "@" + mapset

    suffix = randint(0, 1000000)

    return {
        "id": "t_rast_series_%i" % suffix,
        "module": "t.rast.series",
        "inputs": [
            {"param": "input", "value": qualified_name},
            {"param": "method", "value": "minimum"},
            {"param": "output", "value": output_name},
        ],
        "flags": "t",
    }
 def __init__(self):
     # Backend interface to the GRaaS service; assumes GRaaSInterface()
     # picks up its configuration from defaults — TODO confirm.
     self.iface = GRaaSInterface()
class GRaaSDataProductId(DataProductId):
    """REST resource that describes a single GRASS GIS data product
    (raster, strds or vector layer)."""

    def __init__(self):
        # Backend interface to the GRaaS service.
        self.iface = GRaaSInterface()

    @swagger.doc(GET_DATA_PRODUCT_ID_DOC)
    def get(self, product_id):
        """Return the metadata description of the layer identified by product_id.

        :param product_id: Fully qualified layer id
                           ("location.mapset.datatype.layer")
        :return: A Flask response carrying the product description (200) or an
                 error description (400)
        """

        # List strds maps from the GRASS location

        location, mapset, datatype, layer = self.iface.layer_def_to_components(
            product_id)

        status_code, layer_data = self.iface.layer_info(layer_name=product_id)
        if status_code != 200:
            # BUGFIX: the HTTP status code must be the second argument of
            # make_response, not of jsonify.
            return make_response(
                jsonify(
                    {
                        "description":
                        "An internal error occurred "
                        "while catching GRASS GIS layer information "
                        "for layer <%s>!\n Error: %s"
                        "" % (product_id, str(layer_data))
                    }), 400)

        # Get the projection from the GRASS mapset
        status_code, mapset_info = self.iface.mapset_info(location=location,
                                                          mapset=mapset)
        if status_code != 200:
            # BUGFIX: status code moved from jsonify to make_response.
            return make_response(
                jsonify(
                    {
                        "description":
                        "An internal error occurred "
                        "while catching mapset info "
                        "for mapset <%s>!" % mapset
                    }), 400)

        # Pick a human readable description based on the datatype component.
        description = "Raster dataset"
        if datatype.lower() == "strds":
            description = "Space time raster dataset"
        if datatype.lower() == "vector":
            description = "Vector dataset"

        source = "GRASS GIS location/mapset path: /%s/%s" % (location, mapset)
        srs = mapset_info["projection"]
        extent = SpatialExtent(left=float(layer_data["west"]),
                               right=float(layer_data["east"]),
                               top=float(layer_data["north"]),
                               bottom=float(layer_data["south"]),
                               srs=srs)

        print(layer_data)

        if datatype.lower() == "strds":
            # strds layers carry the full temporal and statistics metadata.
            time = DateTime()
            time["from"] = layer_data["start_time"]
            time["to"] = layer_data["end_time"]

            bands = BandDescription(band_id=product_id)

            info = dict(product_id=product_id,
                        extent=extent,
                        source=source,
                        description=description,
                        time=time,
                        bands=bands,
                        temporal_type=layer_data["start_time"],
                        number_of_maps=layer_data["number_of_maps"],
                        min_min=layer_data["min_min"],
                        min_max=layer_data["min_max"],
                        max_min=layer_data["max_min"],
                        max_max=layer_data["max_max"],
                        ewres_max=layer_data["ewres_max"],
                        ewres_min=layer_data["ewres_min"],
                        nsres_max=layer_data["nsres_max"],
                        nsres_min=layer_data["nsres_min"],
                        map_time=layer_data["map_time"],
                        granularity=layer_data["granularity"],
                        aggregation_type=layer_data["aggregation_type"],
                        creation_time=layer_data["creation_time"],
                        modification_time=layer_data["modification_time"],
                        mapset=mapset,
                        location=location)
        else:
            # Plain raster (and vector) layers only report basic raster info.
            info = dict(
                product_id=product_id,
                extent=extent,
                source=source,
                description=description,
                mapset=mapset,
                location=location,
                title=layer_data["title"],
                comments=layer_data["comments"],
                datatype=layer_data["datatype"],
                cells=layer_data["cells"],
                cols=layer_data["cols"],
                rows=layer_data["rows"],
                ewres=layer_data["ewres"],
                nsres=layer_data["nsres"],
            )

        return make_response(jsonify(info), 200)
 def test_layer_exists_4(self):
     """An existing raster layer must be reported as present."""
     interface = GRaaSInterface(self.gconf)
     exists = interface.check_layer_exists(layer_name="ECAD.PERMANENT.raster.precipitation_yearly_mm_0")
     self.assertTrue(exists)
class GRaaSJobsJobId(JobsJobId):
    """REST resource that reports on and deletes a single GRaaS job."""

    def __init__(self):
        # Backend interface plus the graph database that stores the process
        # graph submitted for each job, keyed by resource id.
        self.iface = GRaaSInterface()
        self.db = GraphDB()

    @swagger.doc(GET_JOBS_ID_DOC)
    def get(self, job_id):
        """Return status information about the job identified by job_id.

        On backend status 200 the stored process graph and the resource info
        are combined into the response; otherwise an error document carrying
        the backend response is returned with the backend's status code.
        """

        try:
            status, response = self.iface.resource_info(job_id)
            if status == 200:
                # The process graph was stored under the resource id at submission.
                process_graph = self.db[job_id]

                info = dict(job_id=job_id,
                            user_id=response["user_id"],
                            status=response["status"],
                            process_graph=process_graph,
                            submitted=response["accept_datetime"],
                            last_update=response["datetime"],
                            consumed_credits=response["time_delta"],
                            job_info=response)

                # Expose any result resources the job produced.
                if "urls" in response and "resources" in response["urls"]:
                    info["resources"] = response["urls"]["resources"]

                return make_response(jsonify(info), 200)
            else:
                process_graph = self.db[job_id]
                info = dict(job_id=job_id,
                            status="error",
                            process_graph=process_graph,
                            job_info=response)

                return make_response(jsonify(info), status)
        except Exception as e:
            # Boundary handler: report any unexpected failure as a 500 error.
            return make_response(jsonify({"error": str(e)}), 500)

    @swagger.doc(DELETE_JOBS_ID_DOC)
    def delete(self, job_id):
        """Delete the job identified by job_id.

        The job info is collected before deletion so it can be returned; if
        the deletion itself fails, an error document replaces it. The final
        status code is the one returned by the delete call.
        """

        try:
            status, response = self.iface.resource_info(job_id)

            if status == 200:

                process_graph = self.db[job_id]
                info = dict(job_id=job_id,
                            user_id="scheduled",
                            status="submitted",
                            process_graph=process_graph,
                            submitted=response["accept_datetime"],
                            last_update=response["datetime"],
                            consumed_credits=response["time_delta"],
                            job_info=response)

                # Actually delete the resource; on failure fall back to an
                # error document but still answer with the delete status.
                status, response = self.iface.delete_resource(job_id)
                if status != 200:
                    process_graph = self.db[job_id]
                    info = dict(job_id=job_id,
                                status="error",
                                process_graph=process_graph,
                                job_info=response)
                return make_response(jsonify(info), status)
            else:
                process_graph = self.db[job_id]
                info = dict(job_id=job_id,
                            status="error",
                            process_graph=process_graph,
                            job_info=response)

                return make_response(jsonify(info), status)
        except Exception as e:
            # Boundary handler: report any unexpected failure as a 500 error.
            return make_response(jsonify({"error": str(e)}), 500)
 def test_health_check(self):
     """The GRaaS backend must report itself as healthy."""
     interface = GRaaSInterface(self.gconf)
     healthy = interface.check_health()
     self.assertTrue(healthy)
Ejemplo n.º 20
0
class GRaaSJobs(Jobs):
    """REST resource that submits openEO process graphs as GRaaS jobs."""

    def __init__(self):
        # Backend interface plus the graph database that stores the submitted
        # process graphs keyed by resource id.
        self.iface = GRaaSInterface()
        self.db = GraphDB()

    @swagger.doc(POST_JOBS_DOC)
    def put(self):
        """Modify the existing database by running the job in a persistent mapset

        :return: A Flask response with the job id and info (200) or an error
                 description
        """

        try:

            # Empty the process location
            graas_openeo_core_wrapper.PROCESS_LOCATION = {}
            process_graph = request.get_json()
            # Transform the process graph into a process chain and store the input location
            # Check all locations in the process graph
            result_name, process_list = analyse_process_graph(process_graph)

            # Exactly one location must be referenced by the process graph.
            if len(graas_openeo_core_wrapper.PROCESS_LOCATION) == 0 or len(
                    graas_openeo_core_wrapper.PROCESS_LOCATION) > 1:
                # BUGFIX: the HTTP status code must be the second argument of
                # make_response, not of jsonify.
                return make_response(
                    jsonify(
                        {
                            "description":
                            "Processes can only be defined for a single location!"
                        }), 400)

            location = graas_openeo_core_wrapper.PROCESS_LOCATION.keys()
            location = list(location)[0]

            status_code, mapsets = self.iface.list_mapsets(location=location)
            if status_code != 200:
                # BUGFIX: status code moved from jsonify to make_response.
                return make_response(
                    jsonify(
                        {
                            "description":
                            "An internal error occurred "
                            "while catching mapsets!"
                        }), 400)

            # Find a free mapset name of the form openeo_mapset_<count>.
            count = 0
            name = "openeo_mapset"
            new_mapset = "%s_%i" % (name, count)
            while new_mapset in mapsets:
                count += 1
                new_mapset = "%s_%i" % (name, count)

            process_chain = dict(list=process_list, version="1")

            # pprint.pprint(process_chain)

            status, response = self.iface.async_persistent_processing(
                location=location,
                mapset=new_mapset,
                process_chain=process_chain)
            # pprint.pprint(response)

            # Save the process graph into the graph db
            self.db[response["resource_id"]] = process_graph

            if status == 200:
                return make_response(
                    jsonify({
                        "job_id": response["resource_id"],
                        "job_info": response
                    }), status)
            else:
                return make_response(jsonify(response), status)
        except Exception as e:
            # Boundary handler: any unexpected failure becomes a 400 error.
            return make_response(jsonify({"error": str(e)}), 400)

    @swagger.doc(POST_JOBS_DOC)
    def post(self):
        """Run the job in an ephemeral mapset

        :return: A Flask response with the job id and info (200) or an error
                 description
        """

        try:
            # Empty the process location
            graas_openeo_core_wrapper.PROCESS_LOCATION = {}
            process_graph = request.get_json()
            # Transform the process graph into a process chain and store the input location
            # Check all locations in the process graph
            result_name, process_list = analyse_process_graph(process_graph)

            # Exactly one location must be referenced by the process graph.
            if len(graas_openeo_core_wrapper.PROCESS_LOCATION) == 0 or len(
                    graas_openeo_core_wrapper.PROCESS_LOCATION) > 1:
                # BUGFIX: status code moved from jsonify to make_response.
                return make_response(
                    jsonify(
                        {
                            "description":
                            "Processes can only be defined for a single location!"
                        }), 400)

            location = graas_openeo_core_wrapper.PROCESS_LOCATION.keys()
            location = list(location)[0]

            process_chain = dict(list=process_list, version="1")

            # pprint.pprint(process_chain)

            status, response = self.iface.async_ephemeral_processing_export(
                location=location, process_chain=process_chain)
            # pprint.pprint(response)

            # Save the process graph into the graph db
            self.db[response["resource_id"]] = process_graph

            if status == 200:
                return make_response(
                    jsonify({
                        "job_id": response["resource_id"],
                        "job_info": response
                    }), status)
            else:
                return make_response(jsonify(response), status)
        except Exception as e:
            # Boundary handler: any unexpected failure becomes a 400 error.
            return make_response(jsonify({"error": str(e)}), 400)
    def test_list_mapsets(self):
        """Listing the mapsets of the ECAD location must succeed."""
        interface = GRaaSInterface(self.gconf)
        status, mapset_list = interface.list_mapsets(location="ECAD")
        pprint(mapset_list)
        self.assertEqual(status, 200)
Ejemplo n.º 22
0
class GRaaSData(Data):
    """REST resource that lists all strds and raster datasets of every
    configured GRASS location."""

    def __init__(self):
        # Backend interface to the GRaaS service.
        self.iface = GRaaSInterface()

    @swagger.doc(GET_DATA_DOC)
    def get(self, ):
        """Return the list of all strds and raster datasets found in the
        locations configured in Config.LOCATIONS.

        :return: A Flask response with the dataset list (200) or an error
                 description (400)
        """

        dataset_list = []

        for location in Config.LOCATIONS:

            status_code, mapsets = self.iface.list_mapsets(location=location)
            if status_code != 200:
                # BUGFIX: the HTTP status code must be the second argument of
                # make_response, not of jsonify.
                return make_response(
                    jsonify(
                        {
                            "description":
                            "An internal error occurred "
                            "while catching mapset "
                            "from location %s!" % location
                        }), 400)

            for mapset in mapsets:

                # List strds maps from the GRASS location
                status_code, strds_data = self.iface.list_strds(
                    location=location, mapset=mapset)
                if status_code != 200:
                    # BUGFIX: status code moved from jsonify to make_response.
                    return make_response(
                        jsonify(
                            {
                                "description":
                                "An internal error occurred "
                                "while catching strds layers!"
                            }), 400)

                for entry in strds_data:
                    strds_id = "%s.%s.strds.%s" % (location, mapset, entry)
                    ds = DataSetListEntry(
                        product_id=strds_id,
                        description="Space time raster dataset",
                        source="GRASS GIS location/mapset path: "
                        "/%s/%s" % (location, mapset))
                    dataset_list.append(ds)

                # List raster maps from the GRASS location
                status_code, raster_data = self.iface.list_raster(
                    location=location, mapset=mapset)
                if status_code != 200:
                    # BUGFIX: status code moved from jsonify to make_response;
                    # the message previously said "strds layers" (copy-paste).
                    return make_response(
                        jsonify(
                            {
                                "description":
                                "An internal error occurred "
                                "while catching raster layers!"
                            }), 400)

                for entry in raster_data:
                    raster_id = "%s.%s.raster.%s" % (location, mapset, entry)
                    ds = DataSetListEntry(
                        product_id=raster_id,
                        description="Raster dataset",
                        source="GRASS GIS location/mapset path: "
                        "/%s/%s" % (location, mapset))
                    dataset_list.append(ds)

        return make_response(jsonify(dataset_list), 200)
 def test_layer_exists_2_error(self):
     """A non-existing strds layer must be reported as absent."""
     interface = GRaaSInterface(self.gconf)
     exists = interface.check_layer_exists(layer_name="ECAD.PERMANENT.strds.precipitation_1950_2013_yearly_mm_nope")
     self.assertFalse(exists)
Ejemplo n.º 24
0
 def __init__(self):
     # Backend interface plus the graph database used to store process
     # graphs; both constructed with their default configuration.
     self.iface = GRaaSInterface()
     self.db = GraphDB()
Ejemplo n.º 25
0
def create_graas_process_chain_entry(input_name, regions):
    """Create a GRaaS command of the process chain that computes the regional
    statistics based on a strds and a polygon.

    The computational region will be set to the vector map, the previous region
    will be saved and after processing restored. A mask will be set that uses
    the vector file as input. This mask will be removed in the end.

    :param input_name: The name of the strds
    :param regions: The URL to the vector file that defines the regions of interest
    :return: A GRaaS process chain description (list of process entries)
    """

    location, mapset, datatype, layer_name = GRaaSInterface.layer_def_to_components(
        input_name)
    # Qualify the layer with its mapset when one is given.
    input_name = layer_name
    if mapset is not None:
        input_name = layer_name + "@" + mapset

    rn = randint(0, 1000000)
    pc = []

    # Import the polygon vector file that defines the regions of interest.
    importer = {
        "id": "importer_%i" % rn,
        "module": "importer",
        "inputs": [{
            "import_descr": {
                "source": regions,
                "type": "vector"
            },
            "param": "map",
            "value": "polygon"
        }]
    }

    # BUGFIX: the three g.region entries and the two r.mask entries previously
    # shared identical ids ("g_region_%i" / "r_mask_%i" with the same rn),
    # making the chain entries indistinguishable; number them to keep every id
    # unique, matching the numbered-id convention used elsewhere in this file.
    g_region_save = {
        "id": "g_region_1_%i" % rn,
        "module": "g.region",
        "inputs": [{
            "param": "save",
            "value": "previous_region"
        }],
        "flags": "g"
    }

    g_region_vector = {
        "id": "g_region_2_%i" % rn,
        "module": "g.region",
        "inputs": [{
            "param": "vector",
            "value": "polygon"
        }],
        "flags": "g"
    }

    r_mask_set = {
        "id": "r_mask_1_%i" % rn,
        "module": "r.mask",
        "inputs": [{
            "param": "vector",
            "value": "polygon"
        }]
    }

    t_rast_univar = {
        "id": "t_rast_univar_%i" % rn,
        "module": "t.rast.univar",
        "inputs": [{
            "param": "input",
            "value": input_name
        }]
    }

    # -r: remove the mask again after the statistics have been computed.
    r_mask_remove = {"id": "r_mask_2_%i" % rn, "module": "r.mask", "flags": "r"}

    g_region_restore = {
        "id": "g_region_3_%i" % rn,
        "module": "g.region",
        "inputs": [{
            "param": "region",
            "value": "previous_region"
        }],
        "flags": "g"
    }

    pc.append(importer)
    pc.append(g_region_save)
    pc.append(g_region_vector)
    pc.append(r_mask_set)
    pc.append(t_rast_univar)
    pc.append(r_mask_remove)
    pc.append(g_region_restore)

    return pc