def test_async_persistent_processing(self):

        iface = ActiniaInterface(self.gconf)
        process_chain = {
            "version": "1",
            "list": [{
                "id": "g_region_1",
                "module": "g.region",
                "flags": "g"
            }]
        }

        status, response = iface.async_persistent_processing(
            location="nc_spm_08",
            mapset="new_user_mapset",
            process_chain=process_chain)
        resource_id = response["resource_id"]
        print(status)
        print(resource_id)
        self.assertEqual(status, 200)

        status, info = iface.resource_info(resource_id)
        print(status)
        print(info)

        time.sleep(2)

        status, info = iface.resource_info(resource_id)
        print(status)
        print(info)
        self.assertEqual(info["status"], "finished")
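        # Hedged sketch (not part of the original test): instead of a fixed
        # sleep, the resource could be polled until it reaches a final state,
        # mirroring the wait_until_finished() helpers further below.
        #
        # for _ in range(30):
        #     status, info = iface.resource_info(resource_id)
        #     if info["status"] in ("finished", "error", "terminated"):
        #         break
        #     time.sleep(1)
        # self.assertEqual(info["status"], "finished")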
class JobsJobIdResults(ResourceBase):
    def __init__(self):
        self.iface = ActiniaInterface()
        self.iface.set_auth(request.authorization.username,
                            request.authorization.password)
        self.db = GraphDB()
        self.job_db = JobDB()
        self.actinia_job_db = ActiniaJobDB()

    def get(self, job_id):
        """Return information about a single job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}/get
        """

        if job_id in self.job_db:
            job: JobInformation = self.job_db[job_id]

            # Check for the actinia id to get the latest actinia job information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                print("Resource id", actinia_id)
                code, job_info = self.iface.resource_info(
                    resource_id=actinia_id)

                if code == 200:
                    # Add the actinia information to the openeo job
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        job.updated = job_info["datetime"]
                        if job_info["status"] == "finished":
                            job.status = "finished"
                        if job_info["status"] == "error":
                            job.status = "error"
                        if job_info["status"] == "accepted":
                            job.status = "queued"
                        if job_info["status"] == "terminated":
                            job.status = "canceled"
                        if job_info["status"] == "running":
                            job.status = "running"

                        # Store the updated job in the database
                        self.job_db[job_id] = job
                else:
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        self.job_db[job_id] = job

                if (job.additional_info['urls']
                        and "resources" in job.additional_info['urls']):
                    resource_links = job.additional_info['urls']['resources']

                    if job.links is None:
                        job.links = []

                    for link in resource_links:
                        eo_link = EoLink(href=link)
                        job.links.append(eo_link)

            return make_response(job.to_json(), 200)
        else:
            return make_response(
                ErrorSchema(
                    id="123456678",
                    code=404,
                    message=f"job with id {job_id} not found in database.").
                to_json(), 404)

    def post(self, job_id):
        """Start a processing job in the actinia backend

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}~1results/post
        """
        try:
            if job_id in self.job_db:
                job: JobInformation = self.job_db[job_id]

                status, response = self.send_actinia_processing_request(
                    job=job)
                self.actinia_job_db[job_id] = response["resource_id"]

                job.additional_info = response
                job.status = "queued"
                job.updated = str(datetime.now())

                self.job_db[job_id] = job

                return make_response(
                    "The creation of the resource has been queued successfully.",
                    202)
            else:
                return make_response(
                    ErrorSchema(
                        id="123456678",
                        code=404,
                        message=f"job with id {job_id} not found in database."
                    ).to_json(), 404)
        except Exception:

            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            error = ErrorSchema(id="1234567890",
                                code=2,
                                message=str(traceback_model))
            return make_response(error.to_json(), 400)

    def send_actinia_processing_request(self, job: JobInformation):
        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}
            # Transform the process graph into a process chain and store the input location
            # Check all locations in the process graph
            result_name, process_list = analyse_process_graph(
                {"process_graph": job.process_graph})

            if len(ActiniaInterface.PROCESS_LOCATION) == 0 or len(
                    ActiniaInterface.PROCESS_LOCATION) > 1:
                raise Exception(
                    "Processes can only be defined for a single location!")

            location = ActiniaInterface.PROCESS_LOCATION.keys()
            location = list(location)[0]

            process_chain = dict(list=process_list, version="1")

            # pprint.pprint(process_chain)

            status, response = self.iface.async_ephemeral_processing_export(
                location=location, process_chain=process_chain)

            return status, response
        except Exception:

            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            raise Exception(str(traceback_model))

    def delete(self, job_id):
        """Cancel a running job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}~1results/delete
        """

        if job_id in self.job_db:

            # Check for the actinia id to get the latest actinia job information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.delete_resource(
                    resource_id=actinia_id)

            return make_response("The job has been successfully cancelled",
                                 204)
        else:
            return make_response(
                ErrorSchema(
                    id="123456678",
                    code=404,
                    message=f"job with id {job_id} not found in database.").
                to_json(), 404)
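# Hedged sketch: the repeated if-chains above translate actinia resource states
# into openEO job states. An equivalent, more compact form (illustrative only;
# the table name is not part of the original code) would be:
#
# ACTINIA_TO_OPENEO_STATUS = {
#     "finished": "finished",
#     "error": "error",
#     "accepted": "queued",
#     "terminated": "canceled",
#     "running": "running",
# }
# job.status = ACTINIA_TO_OPENEO_STATUS.get(job_info["status"], job.status)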
class Result(ResourceBase):
    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.db = GraphDB()

    def post(self):
        """Run the job in an ephemeral mapset synchronously for 10 seconds.
        After 10 seconds the running job
        will be killed on the actinia server and the response will be an
        termination report.
        """

        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}
            request_doc = request.get_json()
            g = Graph(graph_description=request_doc)
            result_name, process_list = g.to_actinia_process_list()

            if len(ActiniaInterface.PROCESS_LOCATION) == 0 or len(
                    ActiniaInterface.PROCESS_LOCATION) > 1:
                descr = "Processes can only be defined for a single location!"
                return make_response(jsonify({"description": descr}, 400))

            location = ActiniaInterface.PROCESS_LOCATION.keys()
            location = list(location)[0]

            if location == "stac":
                location = "latlong_wgs84"

            process_chain = dict(list=process_list, version="1")

            status, response = self.iface.async_ephemeral_processing_export(
                location=location, process_chain=process_chain)
            status, response = self.wait_until_finished(response=response,
                                                        max_time=1000)

            if status == 200:
                result_url = response["urls"]["resources"]
                if len(result_url) == 1:
                    # attempt to return an image
                    result_data = self.iface.get_resource(result_url[0])
                    if result_url[0][-4:] in ("tiff", ".tif"):
                        mimetype = "image/tiff"
                    else:
                        mimetype = "unknown"

                    return Response(result_data.content,
                                    mimetype=mimetype,
                                    direct_passthrough=True)

                return make_response(
                    jsonify({
                        "job_id": response["resource_id"],
                        "job_info": response
                    }), status)
            else:
                return ErrorSchema(
                    id="1234567890",
                    code=404,
                    message=str(response),
                    links=response["urls"]["status"]).as_response(status)
        except Exception:

            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            return ErrorSchema(id="1234567890",
                               code=404,
                               message=str(traceback_model)).as_response(404)

    def wait_until_finished(self, response, max_time: int = 10):
        """Poll the status of a resource and assert its finished HTTP status

        The response will be checked if the resource was accepted.
        Hence it must always be HTTP 200 status.

        The status URL from the response is then polled until status:
        finished, error or terminated.
        The result of the poll can be checked against its HTTP status and its
        GRaaS status message.

        Args:
            response: The accept response
            max_time (int): The maximum time to wait, until the job gets killed

        Returns: response

        """
        # Check if the resource was accepted

        if "resource_id" not in response:
            raise Exception(f"Internal server error: {str(response)}")
        resource_id = response["resource_id"]
        start_time = time.time()

        while True:
            status, resp_data = self.iface.resource_info(resource_id)

            if not isinstance(resp_data, dict) or "status" not in resp_data:
                raise Exception(f"wrong return values {resp_data}")
            if resp_data["status"] in ("finished", "error", "terminated"):
                break
            time.sleep(1)

            current_time = time.time()
            if current_time - start_time > max_time:
                status_code, data = self.iface.delete_resource(
                    resource_id=resource_id)

                if status_code != 200:
                    raise Exception(f"Unable to terminate job, error: {data}")

        return status, resp_data
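# For reference, a hedged sketch of the actinia resource response consumed
# above; only the keys that this file actually reads are listed, the values
# are illustrative:
#
# {
#     "resource_id": "...",
#     "status": "finished",   # accepted | running | finished | error | terminated
#     "datetime": "...",
#     "urls": {
#         "status": "...",
#         "resources": ["..."],
#     },
# }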
class JobsJobIdResults(ResourceBase):
    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        # really use ActiniaConfig user + pw ?
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.db = GraphDB()
        self.job_db = JobDB()
        self.actinia_job_db = ActiniaJobDB()

    def get(self, job_id):
        """Return information about a single job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}/get
        """

        if job_id in self.job_db:
            job: JobInformation = self.job_db[job_id]

            job.stac_version = CAPABILITIES['stac_version']
            job.type = "Feature"
            job.geometry = "json:null"
            job.properties = dict()
            job.properties['datetime'] = None
            job.assets = dict()
            job.links = []

            # Check for the actinia id to get the latest actinia job
            # information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.resource_info(
                    resource_id=actinia_id)

                if code == 200:
                    # Add the actinia information to the openeo job
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        job.updated = job_info["datetime"].replace(
                            " ", "T").replace("'", "").replace('"', '')
                        if job_info["status"] == "finished":
                            job.status = "finished"
                        if job_info["status"] == "error":
                            job.status = "error"
                        if job_info["status"] == "accepted":
                            job.status = "queued"
                        if job_info["status"] == "terminated":
                            job.status = "canceled"
                        if job_info["status"] == "running":
                            job.status = "running"

                        # Store the updated job in the database
                        self.job_db[job_id] = job
                else:
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        self.job_db[job_id] = job

                if (job.additional_info['urls']
                        and "resources" in job.additional_info['urls']):
                    resource_links = job.additional_info['urls']['resources']

                    if job.links is None:
                        job.links = []

                    for link in resource_links:
                        eo_link = EoLink(href=link)
                        job.links.append(eo_link)

            return job.as_response(http_status=200)
        else:
            return ErrorSchema(
                id="123456678",
                code=404,
                message=f"job with id {job_id} not found in database."
            ).as_response(http_status=404)

    def post(self, job_id):
        """Start a processing job in the actinia backend

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}~1results/post
        """
        try:
            if job_id in self.job_db:
                job: JobInformation = self.job_db[job_id]

                status, response = self.send_actinia_processing_request(
                    job=job)
                if "resource_id" not in response:
                    return make_response(
                        ErrorSchema(
                            id="12345678",
                            code=status,
                            message=f"Internal server error: {str(response)}").
                        to_json(), status)
                self.actinia_job_db[job_id] = response["resource_id"]

                job.additional_info = response
                job.status = "queued"
                job.updated = str(datetime.now().isoformat())

                self.job_db[job_id] = job

                return make_response(
                    "The creation of the resource has been queued successfully.",
                    202)
            else:
                return ErrorSchema(
                    id="123456678",
                    code=404,
                    message=f"job with id {job_id} not found in database."
                ).as_response(http_status=404)
        except Exception:

            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            return ErrorSchema(
                id="1234567890", code=2,
                message=str(traceback_model)).as_response(http_status=400)

    def send_actinia_processing_request(self, job: JobInformation):
        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}
            graph = Graph(job.process)
            result_name, process_list = graph.to_actinia_process_list()

            if len(ActiniaInterface.PROCESS_LOCATION) == 0 or len(
                    ActiniaInterface.PROCESS_LOCATION) > 1:
                raise Exception(
                    "Processes can only be defined for a single location!")

            location = ActiniaInterface.PROCESS_LOCATION.keys()
            location = list(location)[0]

            process_chain = dict(list=process_list, version="1")

            # pprint.pprint(process_chain)

            status, response = self.iface.async_ephemeral_processing_export(
                location=location, process_chain=process_chain)

            return status, response
        except Exception:

            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            raise Exception(str(traceback_model))

    def delete(self, job_id):
        """Cancel a running job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}~1results/delete
        """

        if job_id in self.job_db:

            # Check for the actinia id to get the latest actinia job
            # information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.delete_resource(
                    resource_id=actinia_id)

            return make_response("The job has been successfully cancelled",
                                 204)
        else:
            return ErrorSchema(
                id="123456678",
                code=404,
                message=f"job with id {job_id} not found in database."
            ).as_response(http_status=404)
class JobsJobId(ResourceBase):
    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.db = GraphDB()
        self.job_db = JobDB()
        # Required by get() and delete() below, which look up the actinia id
        self.actinia_job_db = ActiniaJobDB()

    def get(self, job_id):
        """Return information about a single job

        https://api.openeo.org/#operation/describe-job
        """

        if job_id in self.job_db:
            job: JobInformation = self.job_db[job_id]

            job.stac_version = CAPABILITIES['stac_version']
            job.type = "Feature"
            job.geometry = "json:null"
            job.properties = dict()
            job.properties['datetime'] = None
            job.assets = dict()
            job.links = []

            # Check for the actinia id to get the latest actinia job
            # information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.resource_info(
                    resource_id=actinia_id)

                if code == 200:
                    # Add the actinia information to the openeo job
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        job.updated = job_info["datetime"].replace(
                            " ", "T").replace("'", "").replace('"', '')
                        if job_info["status"] == "finished":
                            job.status = "finished"
                        if job_info["status"] == "error":
                            job.status = "error"
                        if job_info["status"] == "accepted":
                            job.status = "queued"
                        if job_info["status"] == "terminated":
                            job.status = "canceled"
                        if job_info["status"] == "running":
                            job.status = "running"

                        # Store the updated job in the database
                        self.job_db[job_id] = job
                else:
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        self.job_db[job_id] = job

                if (job.additional_info['urls']
                        and "resources" in job.additional_info['urls']):
                    resource_links = job.additional_info['urls']['resources']

                    if job.links is None:
                        job.links = []

                    for link in resource_links:
                        eo_link = EoLink(href=link)
                        job.links.append(eo_link)

            return job.as_response(http_status=200)
        else:
            return ErrorSchema(
                id="123456678",
                code=404,
                message=f"job with id {job_id} not found in database."
            ).as_response(http_status=404)

    def patch(self, job_id):
        """Update a job in the job database"""
        # TODO: Implement user specific database access
        try:
            job = request.get_json()
            if job_id in self.job_db:

                if "process" not in job:
                    return ErrorSchema(
                        id=uuid4(),
                        message="A process is required in the job request"
                    ).as_response(http_status=400)

                job_info = check_job(job=job, job_id=job_id)
                self.job_db[job_id] = job_info
                return make_response(job_id, 204)
            else:
                return ErrorSchema(
                    id="123456678",
                    code=404,
                    message=f"job with id {job_id} not found in database."
                ).as_response(http_status=404)
        except Exception:

            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            return ErrorSchema(
                id="1234567890", code=2,
                message=str(traceback_model)).as_response(http_status=400)

    def delete(self, job_id):
        """Delete a single job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}/delete
        """

        if job_id in self.job_db:
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.delete_resource(
                    resource_id=actinia_id)

            del self.job_db[job_id]
            return make_response("The job has been successfully deleted", 204)
        else:
            return ErrorSchema(
                id="123456678",
                code=404,
                message=f"job with id {job_id} not found in database."
            ).as_response(http_status=404)
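# Hedged note on JobsJobId.delete() above: the job-to-resource mapping in
# self.actinia_job_db is left in place when a job is deleted. A variant that
# also drops it (assuming the *JobDB wrappers support item deletion, which is
# not shown in this file) could look like:
#
# if job_id in self.actinia_job_db:
#     del self.actinia_job_db[job_id]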
class JobsJobIdLogs(ResourceBase):

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        # really use ActiniaConfig user + pw ?
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.db = GraphDB()
        self.job_db = JobDB()
        self.actinia_job_db = ActiniaJobDB()

    def get(self, job_id):
        """Return information about a single job

        https://api.openeo.org/#operation/debug-job
        """

        if job_id in self.job_db:
            job: JobInformation = self.job_db[job_id]
            job_logs = {'logs': [], 'links': []}

            # Check for the actinia id to get the latest actinia job
            # information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.resource_info(
                    resource_id=actinia_id)

                if code == 200:
                    # Add the actinia information to the openeo job
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        job.updated = job_info["datetime"]
                        if job_info["status"] == "finished":
                            job.status = "finished"
                        if job_info["status"] == "error":
                            job.status = "error"
                        if job_info["status"] == "accepted":
                            job.status = "queued"
                        if job_info["status"] == "terminated":
                            job.status = "canceled"
                        if job_info["status"] == "running":
                            job.status = "running"

                        # Store the updated job in the database
                        self.job_db[job_id] = job
                else:
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        self.job_db[job_id] = job

                links = []
                if (job.additional_info['urls'] and
                        "resources" in job.additional_info['urls']):
                    resource_links = job.additional_info['urls']['resources']

                    for link in resource_links:
                        eo_link = EoLink(href=link)
                        links.append(eo_link)

                job_logs['logs'] = job
                job_logs['links'] = links

            return make_response(jsonify(job_logs), 200)
        else:
            return ErrorSchema(
                id="123456678",
                code=404,
                message=f"job with id {job_id} not found in database.").as_response(
                http_status=404)
class Preview(ResourceBase):
    def __init__(self):
        self.iface = ActiniaInterface()
        self.iface.set_auth(request.authorization.username,
                            request.authorization.password)
        self.db = GraphDB()

    def post(self):
        """Run the job in an ephemeral mapset synchronously for 10 seconds. After 10 seconds the running job
        will be killed on the actinia server and the response will be an termination report.
        """

        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}
            request_doc = request.get_json()
            process_graph = request_doc["process_graph"]
            # Transform the process graph into a process chain and store the input location
            # Check all locations in the process graph
            result_name, process_list = analyse_process_graph(process_graph)

            if len(ActiniaInterface.PROCESS_LOCATION) == 0 or len(
                    ActiniaInterface.PROCESS_LOCATION) > 1:
                return make_response(
                    jsonify({
                        "description":
                        "Processes can only be defined for a single location!"
                    }), 400)

            location = ActiniaInterface.PROCESS_LOCATION.keys()
            location = list(location)[0]

            process_chain = dict(list=process_list, version="1")

            # pprint.pprint(process_chain)

            status, response = self.iface.async_ephemeral_processing_export(
                location=location, process_chain=process_chain)
            status, response = self.wait_until_finished(response=response,
                                                        max_time=10)

            if status == 200:
                return make_response(
                    jsonify({
                        "job_id": response["resource_id"],
                        "job_info": response
                    }), status)
            else:
                error = ErrorSchema(id="1234567890",
                                    code=1,
                                    message=str(response),
                                    links=response["urls"]["status"])
                return make_response(error.to_json(), status)
        except Exception:

            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            error = ErrorSchema(id="1234567890",
                                code=2,
                                message=str(traceback_model))
            return make_response(error.to_json(), 400)

    def wait_until_finished(self, response, max_time: int = 10):
        """Poll the status of a resource and assert its finished HTTP status

        The response will be checked if the resource was accepted. Hence it must always be HTTP 200 status.

        The status URL from the response is then polled until status: finished, error or terminated.
        The result of the poll can be checked against its HTTP status and its GRaaS status message.

        Args:
            response: The accept response
            max_time (int): The maximum time to wait, until the job gets killed

        Returns: response

        """
        # Check if the resource was accepted

        resource_id = response["resource_id"]
        start_time = time.time()

        while True:
            status, resp_data = self.iface.resource_info(resource_id)

            if not isinstance(resp_data, dict) or "status" not in resp_data:
                raise Exception(f"wrong return values {resp_data}")
            if resp_data["status"] in ("finished", "error", "terminated"):
                break
            time.sleep(1)

            current_time = time.time()
            if current_time - start_time > max_time:
                status_code, data = self.iface.delete_resource(
                    resource_id=resource_id)

                if status_code != 200:
                    raise Exception(f"Unable to terminate job, error: {data}")

        return status, resp_data
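# Hedged usage sketch (illustrative only): how a client might call the Preview
# endpoint. The route prefix depends on how the Flask resources are registered,
# which is not shown in this file, so the URL below is an assumption.
#
# import requests
#
# resp = requests.post(
#     "http://localhost:5000/preview",        # assumed mount point
#     json={"process_graph": {...}},          # an openEO process graph
#     auth=("user", "password"),              # assumed basic-auth credentials
# )
# print(resp.status_code, resp.json())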