class JobsJobIdEstimate(ResourceBase):
    """The /jobs/{job_id}/estimate endpoint implementation."""

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        # really use ActiniaConfig user + pw ?
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.db = GraphDB()
        self.job_db = JobDB()
        self.actinia_job_db = ActiniaJobDB()

    def get(self, job_id):
        """Return information about a single job

        https://api.openeo.org/#operation/estimate-job
        """
        # TODO
        # at least one of costs, duration, size must be specified
        # optional: downloads_included, expires
        if job_id not in self.job_db:
            return ErrorSchema(
                id="123456678", code=404,
                message=f"job with id {job_id} not found in database."
            ).as_response(http_status=404)
        # Placeholder estimate until real cost accounting exists
        cost_estimate = {"costs": 0}
        return make_response(jsonify(cost_estimate), 200)
class ProcessGraphs(ResourceBase):
    """The /process_graphs endpoint implementation"""

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.graph_db = GraphDB()

    def get(self):
        """Return all jobs in the job database"""
        # TODO: Implement user specific database access
        entries = []
        for graph_id in self.graph_db:
            stored = self.graph_db[graph_id]
            # title and description are optional fields of a stored graph
            title = stored["title"] if "title" in stored else None
            description = (stored["description"]
                           if "description" in stored else None)
            entries.append(
                ProcessGraphListEntry(title=title, description=description,
                                      id=graph_id))
        return ProcessGraphList(process_graphs=entries).as_response(
            http_status=200)

    # no longer supported, replaced by ProcessGraphId
    def post(self):
        try:
            """Store a process graph in the graph database"""
            # TODO: Implement user specific database access
            new_graph_id = f"user-graph-{str(uuid4())}"
            self.graph_db[new_graph_id] = request.get_json()
            return make_response(new_graph_id, 201)
        except Exception:
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            return ErrorSchema(
                id="1234567890", code=2,
                message=str(traceback_model)).as_response(http_status=400)

    def delete(self):
        """Clear the process graph database"""
        self.graph_db.clear()
        return make_response(
            "All process graphs have been successfully deleted", 204)
def ok_user_and_password(username, password):
    """Check whether the given credentials authenticate against actinia.

    Args:
        username: The actinia user name
        password: The actinia password

    Returns:
        bool: True when listing the locations succeeds (HTTP 200),
        False otherwise.
    """
    iface = ActiniaInterface()
    iface.set_auth(username, password)
    # The location listing requires valid credentials, so its status code
    # doubles as an authentication check; the payload itself is unused.
    status_code, _locations = iface.list_locations()
    return status_code == 200
class GraphValidation(ResourceBase):
    """The /validation endpoint: validate a process graph against actinia."""

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)

    def post(self):
        """Run the job in an ephemeral mapset

        :return:
        """
        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}
            process_graph = request.get_json()
            graph = Graph(graph_description=process_graph)
            result_name, process_list = graph.to_actinia_process_list()
            location_count = len(ActiniaInterface.PROCESS_LOCATION)
            if location_count == 0 or location_count > 1:
                msg = "Processes can only be defined for a single location!"
                status = 400
                es = ErrorSchema(id=str(datetime.now().isoformat()),
                                 code=status, message=str(msg))
                return make_response(es.to_json(), status)
            location = list(ActiniaInterface.PROCESS_LOCATION.keys())[0]
            process_chain = dict(list=process_list, version="1")
            status, response = \
                self.iface.sync_ephemeral_processing_validation(
                    location=location, process_chain=process_chain)
            if status == 200:
                errors = {"errors": []}
                return make_response(errors, 200)
            return ErrorSchema(
                id=str(datetime.now().isoformat()), code=status,
                message=str(response)).as_response(http_status=status)
        except Exception as e:
            return ErrorSchema(id=str(datetime.now().isoformat()), code=400,
                               message=str(e)).as_response(http_status=400)
class GraphValidation(ResourceBase):
    """Legacy /validation implementation (openEO v0.3 style).

    NOTE(review): this class shares its name with the newer
    GraphValidation implementation in this file; whichever definition is
    executed last shadows the other. Consider removing one of them.
    """

    def __init__(self):
        # NOTE(review): unlike the sibling resources, ResourceBase.__init__
        # is not called here -- confirm this is intentional.
        self.iface = ActiniaInterface()
        # Authenticate with the credentials of the current HTTP request
        self.iface.set_auth(request.authorization.username,
                            request.authorization.password)

    def post(self):
        """Run the job in an ephemeral mapset

        :return:
        """
        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}
            process_graph = request.get_json()
            # Transform the process graph into a process chain and store
            # the input location
            # Check all locations in the process graph
            result_name, process_list = analyse_process_graph(process_graph)
            if len(ActiniaInterface.PROCESS_LOCATION) == 0 or len(
                    ActiniaInterface.PROCESS_LOCATION) > 1:
                msg = "Processes can only be defined for a single location!"
                status = 400
                es = ErrorSchema(id=str(datetime.now()), code=status,
                                 message=str(msg))
                return make_response(es.to_json(), status)
            location = ActiniaInterface.PROCESS_LOCATION.keys()
            location = list(location)[0]
            process_chain = dict(list=process_list, version="1")
            # Bugfix: removed leftover debugging output (pprint of the
            # process chain and of the actinia response) that polluted the
            # server log on every validation request.
            status, response = \
                self.iface.sync_ephemeral_processing_validation(
                    location=location, process_chain=process_chain)
            if status == 200:
                return make_response("", 204)
            else:
                es = ErrorSchema(id=str(datetime.now()), code=status,
                                 message=str(response))
                return make_response(es.to_json(), status)
        except Exception as e:
            es = ErrorSchema(id=str(datetime.now()), code=400,
                             message=str(e))
            return make_response(es.to_json(), 400)
class Jobs(ResourceBase):
    """The /jobs endpoint implementation"""

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.graph_db = GraphDB()
        self.job_db = JobDB()

    def get(self):
        """Return all jobs in the job database"""
        # TODO: Implement user specific database access
        all_jobs = []
        for job_key in self.job_db:
            stored_job = self.job_db[job_key]
            # The job listing must not expose the full process definition
            stored_job.process = None
            all_jobs.append(stored_job)
        return JobList(jobs=all_jobs, links=[]).as_response(http_status=200)

    def post(self):
        """Submit a new job to the job database"""
        # TODO: Implement user specific database access
        job_id = f"user-job-{str(uuid4())}"
        # job_id = str(uuid4())
        job = request.get_json()
        # return ErrorSchema(id=uuid4(), message="A process graph is required
        # in the request").as_response(400)
        job_info = check_job(job=job, job_id=job_id)
        self.job_db[job_id] = job_info
        response = make_response(job_id, 201)
        # add openeo-identifier
        response.headers["OpenEO-Identifier"] = job_id
        # add location, e.g. "https://openeo.org/api/v1.0/resource/<job_id>"
        response.headers["Location"] = f"{url_for('.jobs')}/{job_id}"
        return response

    def delete(self):
        """Clear the job database"""
        self.job_db.clear()
        return make_response("All jobs has been successfully deleted", 204)
class ProcessGraphs(ResourceBase):
    """Legacy /process_graphs implementation (openEO v0.3 style).

    NOTE(review): this class shares its name with the newer ProcessGraphs
    implementation in this file; whichever definition is executed last
    shadows the other.
    """

    def __init__(self):
        self.iface = ActiniaInterface()
        # Authenticate with the credentials of the current HTTP request
        self.iface.set_auth(request.authorization.username,
                            request.authorization.password)
        self.graph_db = GraphDB()

    def get(self):
        """Return all jobs in the job database"""
        # TODO: Implement user specific database access
        process_graphs = []
        for key in self.graph_db:
            graph = self.graph_db[key]
            # Bugfix: "title" and "description" are optional in a stored
            # graph; indexing them directly raised KeyError for graphs
            # without these fields. The newer ProcessGraphs.get in this
            # file guards the same way.
            title = graph["title"] if "title" in graph else None
            description = (graph["description"]
                           if "description" in graph else None)
            entry = ProcessGraphListEntry(title=title,
                                          description=description,
                                          process_graph_id=key)
            process_graphs.append(entry)
        return make_response(
            ProcessGraphList(process_graphs=process_graphs).to_json(), 200)

    def post(self):
        try:
            """Store a process graph in the graph database"""
            # TODO: Implement user specific database access
            process_graph_id = f"user-graph::{str(uuid4())}"
            process_graph = request.get_json()
            self.graph_db[process_graph_id] = process_graph
            return make_response(process_graph_id, 201)
        except Exception:
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            error = ErrorSchema(id="1234567890", code=2,
                                message=str(traceback_model))
            return make_response(error.to_json(), 400)

    def delete(self):
        """Clear the process graph database"""
        self.graph_db.clear()
        return make_response(
            "All process graphs have been successfully deleted", 204)
class Jobs(ResourceBase):
    """The /jobs endpoint implementation"""

    def __init__(self):
        self.iface = ActiniaInterface()
        # Authenticate with the credentials of the current HTTP request
        self.iface.set_auth(request.authorization.username,
                            request.authorization.password)
        self.graph_db = GraphDB()
        self.job_db = JobDB()

    def get(self):
        """Return all jobs in the job database"""
        # TODO: Implement user specific database access
        collected = []
        for identifier in self.job_db:
            entry = self.job_db[identifier]
            # Strip the process graph from the listing
            entry.process_graph = None
            collected.append(entry)
        return make_response(JobList(jobs=collected).to_json(), 200)

    def post(self):
        """Submit a new job to the job database"""
        # TODO: Implement user specific database access
        job_id = f"user-job::{str(uuid4())}"
        job = request.get_json()
        if "process_graph" not in job:
            error = ErrorSchema(
                id=uuid4(),
                message="A process graph is required in the request")
            return make_response(error.to_json(), 400)
        self.job_db[job_id] = check_job(job=job, job_id=job_id)
        return make_response(job_id, 201)

    def delete(self):
        """Clear the job database"""
        self.job_db.clear()
        return make_response("All jobs has been successfully deleted", 204)
class JobsJobId(ResourceBase):
    """Legacy /jobs/{job_id} endpoint implementation (openEO v0.3)."""

    def __init__(self):
        self.iface = ActiniaInterface()
        # Authenticate with the credentials of the current HTTP request
        self.iface.set_auth(request.authorization.username,
                            request.authorization.password)
        self.db = GraphDB()
        self.job_db = JobDB()

    def _job_not_found(self, job_id):
        """Build the common 404 response for an unknown job id."""
        return make_response(
            ErrorSchema(
                id="123456678", code=404,
                message=f"job with id {job_id} not found in database.").
            to_json(), 404)

    def get(self, job_id):
        """Return information about a single job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}/get
        """
        if job_id not in self.job_db:
            return self._job_not_found(job_id)
        return make_response(self.job_db[job_id].to_json(), 200)

    def patch(self, job_id):
        try:
            """Update a job in the job database"""
            # TODO: Implement user specific database access
            job = request.get_json()
            if job_id not in self.job_db:
                return self._job_not_found(job_id)
            if "process_graph" not in job:
                error = ErrorSchema(
                    id=uuid4(),
                    message="A process graph is required in the job request")
                return make_response(error.to_json(), 400)
            self.job_db[job_id] = check_job(job=job, job_id=job_id)
            return make_response(job_id, 204)
        except Exception:
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            error = ErrorSchema(id="1234567890", code=2,
                                message=str(traceback_model))
            return make_response(error.to_json(), 400)

    def delete(self, job_id):
        """Delete a single job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}/delete
        """
        if job_id not in self.job_db:
            return self._job_not_found(job_id)
        del self.job_db[job_id]
        return make_response("The job has been successfully deleted", 204)
class JobsJobIdResults(ResourceBase):
    # Legacy /jobs/{job_id}/results implementation (openEO v0.3 API).
    # NOTE(review): this class shares its name with the newer
    # JobsJobIdResults implementation in this file.

    def __init__(self):
        self.iface = ActiniaInterface()
        # Authenticate against actinia with the credentials of the
        # current HTTP request
        self.iface.set_auth(request.authorization.username,
                            request.authorization.password)
        self.db = GraphDB()
        self.job_db = JobDB()
        # Maps openeo job ids to actinia resource ids
        self.actinia_job_db = ActiniaJobDB()

    def get(self, job_id):
        """Return information about a single job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}/get
        """
        if job_id in self.job_db:
            job: JobInformation = self.job_db[job_id]
            # Check for the actinia id to get the latest actinia job information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                # NOTE(review): leftover debug print -- consider a logger
                print("Resource id", actinia_id)
                code, job_info = self.iface.resource_info(
                    resource_id=actinia_id)
                if code == 200:
                    # Add the actinia information to the openeo job
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        job.updated = job_info["datetime"]
                        # Map the actinia resource status onto the openeo
                        # job status vocabulary
                        if job_info["status"] == "finished":
                            job.status = "finished"
                        if job_info["status"] == "error":
                            job.status = "error"
                        if job_info["status"] == "accepted":
                            job.status = "queued"
                        if job_info["status"] == "terminated":
                            job.status = "canceled"
                        if job_info["status"] == "running":
                            job.status = "running"
                        # Store the updated job in the database
                        self.job_db[job_id] = job
                else:
                    # Non-200 from actinia: still persist the raw response
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        self.job_db[job_id] = job
            # Expose the exported resource URLs as links on the job.
            # NOTE(review): job.additional_info may be None for a job that
            # was never started -- confirm before relying on this path.
            if (job.additional_info['urls']
                    and job.additional_info['urls']['resources']):
                resource_links = job.additional_info['urls']['resources']
                if job.links is None:
                    job.links = []
                for link in resource_links:
                    eo_link = EoLink(href=link)
                    job.links.append(eo_link)
            return make_response(job.to_json(), 200)
        else:
            return make_response(
                ErrorSchema(
                    id="123456678", code=404,
                    message=f"job with id {job_id} not found in database.").
                to_json(), 404)

    def post(self, job_id):
        """Start a processing job in the actinia backend

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}~1results/post
        """
        try:
            if job_id in self.job_db:
                job: JobInformation = self.job_db[job_id]
                status, response = self.send_actinia_processing_request(
                    job=job)
                # NOTE(review): response is assumed to carry "resource_id";
                # the except below catches the KeyError if it does not.
                self.actinia_job_db[job_id] = response["resource_id"]
                job.additional_info = response
                job.status = "queued"
                job.updated = str(datetime.now())
                self.job_db[job_id] = job
                return make_response(
                    "The creation of the resource has been queued successfully.",
                    202)
            else:
                return make_response(
                    ErrorSchema(
                        id="123456678", code=404,
                        message=f"job with id {job_id} not found in database."
                    ).to_json(), 404)
        except Exception:
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            error = ErrorSchema(id="1234567890", code=2,
                                message=str(traceback_model))
            return make_response(error.to_json(), 400)

    def send_actinia_processing_request(self, job: JobInformation):
        """Translate the job's process graph and submit it to actinia.

        Returns a (status, response) tuple from the asynchronous export
        call; re-raises any failure wrapped in an Exception that carries
        the formatted traceback.
        """
        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}
            # Transform the process graph into a process chain and store the input location
            # Check all locations in the process graph
            result_name, process_list = analyse_process_graph(
                {"process_graph": job.process_graph})
            if len(ActiniaInterface.PROCESS_LOCATION) == 0 or len(
                    ActiniaInterface.PROCESS_LOCATION) > 1:
                raise Exception(
                    "Processes can only be defined for a single location!")
            location = ActiniaInterface.PROCESS_LOCATION.keys()
            location = list(location)[0]
            process_chain = dict(list=process_list, version="1")
            # pprint.pprint(process_chain)
            status, response = self.iface.async_ephemeral_processing_export(
                location=location, process_chain=process_chain)
            return status, response
        except Exception:
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            raise Exception(str(traceback_model))

    def delete(self, job_id):
        """Cancel a running job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}~1results/delete
        """
        if job_id in self.job_db:
            # Check for the actinia id to get the latest actinia job information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.delete_resource(
                    resource_id=actinia_id)
            # Success is reported even when no actinia resource was tracked
            return make_response("The job has been successfully cancelled",
                                 204)
        else:
            return make_response(
                ErrorSchema(
                    id="123456678", code=404,
                    message=f"job with id {job_id} not found in database.").
                to_json(), 404)
class Result(ResourceBase):
    """The /result endpoint: run a job synchronously in an ephemeral mapset."""

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.db = GraphDB()

    def post(self):
        """Run the job in an ephemeral mapset synchronously for 10 seconds.

        After 10 seconds the running job will be killed on the actinia
        server and the response will be an termination report.
        """
        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}
            request_doc = request.get_json()
            g = Graph(graph_description=request_doc)
            result_name, process_list = g.to_actinia_process_list()
            if len(ActiniaInterface.PROCESS_LOCATION) == 0 or len(
                    ActiniaInterface.PROCESS_LOCATION) > 1:
                descr = "Processes can only be defined for a single location!"
                # Bugfix: the status code was previously passed INTO
                # jsonify(), which serialized it as part of the JSON body
                # and returned HTTP 200. Pass it to make_response() so the
                # client actually receives a 400.
                return make_response(jsonify({"description": descr}), 400)
            location = ActiniaInterface.PROCESS_LOCATION.keys()
            location = list(location)[0]
            # STAC input is processed in the generic latlong location
            if location == "stac":
                location = "latlong_wgs84"
            process_chain = dict(list=process_list, version="1")
            status, response = self.iface.async_ephemeral_processing_export(
                location=location, process_chain=process_chain)
            status, response = self.wait_until_finished(response=response,
                                                        max_time=1000)
            if status == 200:
                result_url = response["urls"]["resources"]
                if len(result_url) == 1:
                    # attempt to return an image
                    result_data = self.iface.get_resource(result_url[0])
                    if result_url[0][-4:] in ("tiff", ".tif"):
                        mimetype = "image/tiff"
                    else:
                        mimetype = "unknown"
                    return Response(result_data.content,
                                    mimetype=mimetype,
                                    direct_passthrough=True)
                return make_response(
                    jsonify({
                        "job_id": response["resource_id"],
                        "job_info": response
                    }), status)
            else:
                return ErrorSchema(
                    id="1234567890", code=404, message=str(response),
                    links=response["urls"]["status"]).as_response(status)
        except Exception:
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            return ErrorSchema(
                id="1234567890", code=404,
                message=str(traceback_model)).as_response(404)

    def wait_until_finished(self, response, max_time: int = 10):
        """Poll the status of a resource and assert its finished HTTP status

        The response will be checked if the resource was accepted. Hence it
        must always be HTTP 200 status.

        The status URL from the response is then polled until status:
        finished, error or terminated. The result of the poll can be
        checked against its HTTP status and its GRaaS status message.

        Args:
            response: The accept response
            max_time (int): The maximum time to wait, until the job gets
                            killed

        Returns:
            response
        """
        # Check if the resource was accepted
        if "resource_id" not in response:
            raise Exception(f"Internal server error: {str(response)}")
        resource_id = response["resource_id"]
        start_time = time.time()
        while True:
            status, resp_data = self.iface.resource_info(resource_id)
            if isinstance(resp_data, dict) is False \
                    or "status" not in resp_data:
                raise Exception("wrong return values %s" % str(resp_data))
            if resp_data["status"] == "finished" or \
                    resp_data["status"] == "error" or \
                    resp_data["status"] == "terminated":
                break
            time.sleep(1)
            current_time = time.time()
            if current_time - start_time > max_time:
                # Timeout: terminate the resource; the next poll is then
                # expected to report "terminated" and end the loop
                status_code, data = self.iface.delete_resource(
                    resource_id=resource_id)
                if status_code != 200:
                    raise Exception(
                        f"Unable to terminate job, error: {data}")
        return status, resp_data
class JobsJobIdResults(ResourceBase):
    # The /jobs/{job_id}/results endpoint implementation.

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        # really use ActiniaConfig user + pw ?
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.db = GraphDB()
        self.job_db = JobDB()
        # Maps openeo job ids to actinia resource ids
        self.actinia_job_db = ActiniaJobDB()

    def get(self, job_id):
        """Return information about a single job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}/get
        """
        if job_id in self.job_db:
            job: JobInformation = self.job_db[job_id]
            # Reset the STAC item fields before (re)filling them below
            job.stac_version = CAPABILITIES['stac_version']
            job.type = "Feature"
            job.geometry = "json:null"
            job.properties = dict()
            job.properties['datetime'] = None
            job.assets = dict()
            job.links = []
            # Check for the actinia id to get the latest actinia job
            # information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.resource_info(
                    resource_id=actinia_id)
                if code == 200:
                    # Add the actinia information to the openeo job
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        # Normalize the actinia timestamp: 'T' separator,
                        # quotes stripped
                        job.updated = job_info["datetime"].replace(
                            " ", "T").replace("'", "").replace('"', '')
                        # Map the actinia resource status onto the openeo
                        # job status vocabulary
                        if job_info["status"] == "finished":
                            job.status = "finished"
                        if job_info["status"] == "error":
                            job.status = "error"
                        if job_info["status"] == "accepted":
                            job.status = "queued"
                        if job_info["status"] == "terminated":
                            job.status = "canceled"
                        if job_info["status"] == "running":
                            job.status = "running"
                        # Store the updated job in the database
                        self.job_db[job_id] = job
                else:
                    # Non-200 from actinia: still persist the raw response
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        self.job_db[job_id] = job
            # Expose the exported resource URLs as links on the job.
            # NOTE(review): job.additional_info may still be None for a job
            # that was never started -- confirm before relying on this path.
            if (job.additional_info['urls']
                    and "resources" in job.additional_info['urls']):
                resource_links = job.additional_info['urls']['resources']
                if job.links is None:
                    job.links = []
                for link in resource_links:
                    eo_link = EoLink(href=link)
                    job.links.append(eo_link)
            return job.as_response(http_status=200)
        else:
            return ErrorSchema(
                id="123456678", code=404,
                message=f"job with id {job_id} not found in database."
            ).as_response(http_status=404)

    def post(self, job_id):
        """Start a processing job in the actinia backend

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}~1results/post
        """
        try:
            if job_id in self.job_db:
                job: JobInformation = self.job_db[job_id]
                status, response = self.send_actinia_processing_request(
                    job=job)
                # actinia rejected or failed the submission
                if "resource_id" not in response:
                    return make_response(
                        ErrorSchema(
                            id="12345678", code=status,
                            message=f"Internal server error: {str(response)}").
                        to_json(), status)
                self.actinia_job_db[job_id] = response["resource_id"]
                job.additional_info = response
                job.status = "queued"
                job.updated = str(datetime.now().isoformat())
                self.job_db[job_id] = job
                return make_response(
                    "The creation of the resource has been queued successfully.",
                    202)
            else:
                return ErrorSchema(
                    id="123456678", code=404,
                    message=f"job with id {job_id} not found in database."
                ).as_response(http_status=404)
        except Exception:
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            return ErrorSchema(
                id="1234567890", code=2,
                message=str(traceback_model)).as_response(http_status=400)

    def send_actinia_processing_request(self, job: JobInformation):
        """Translate the job's process graph and submit it to actinia.

        Returns a (status, response) tuple from the asynchronous export
        call; re-raises any failure wrapped in an Exception that carries
        the formatted traceback.
        """
        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}
            graph = Graph(job.process)
            result_name, process_list = graph.to_actinia_process_list()
            if len(ActiniaInterface.PROCESS_LOCATION) == 0 or len(
                    ActiniaInterface.PROCESS_LOCATION) > 1:
                raise Exception(
                    "Processes can only be defined for a single location!")
            location = ActiniaInterface.PROCESS_LOCATION.keys()
            location = list(location)[0]
            process_chain = dict(list=process_list, version="1")
            # pprint.pprint(process_chain)
            status, response = self.iface.async_ephemeral_processing_export(
                location=location, process_chain=process_chain)
            return status, response
        except Exception:
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            raise Exception(str(traceback_model))

    def delete(self, job_id):
        """Cancel a running job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}~1results/delete
        """
        if job_id in self.job_db:
            # Check for the actinia id to get the latest actinia job
            # information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.delete_resource(
                    resource_id=actinia_id)
            # Success is reported even when no actinia resource was tracked
            return make_response("The job has been successfully cancelled",
                                 204)
        else:
            return ErrorSchema(
                id="123456678", code=404,
                message=f"job with id {job_id} not found in database."
            ).as_response(http_status=404)
def register_processes():
    """Fetch all modules from actinia and register them as openeo processes.

    Fills the module-level dictionaries
    ACTINIA_OPENEO_PROCESS_DESCRIPTION_DICT (openeo process name ->
    process description) and OPENEO_ACTINIA_ID_DICT (openeo process name
    -> actinia module id, plus the output name where applicable).
    """
    iface = ActiniaInterface()
    iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
    # TODO: add logger
    print("Requesting modules from %s..." % ActiniaConfig.HOST)
    status_code, modules = iface.list_modules()
    if status_code == 200:
        # TODO: add logger
        print("Registering modules...")
        for module in modules:
            # convert grass module names to openeo process names
            # special treatment for GRASS modules in
            # T_BASENAME_MODULES_LIST
            if module["id"] in T_BASENAME_MODULES_LIST:
                # Drop the "basename" parameter/return entries; they are
                # handled internally and must not surface in openeo
                if "parameters" in module:
                    module["parameters"] = [
                        i for i in module["parameters"]
                        if i["name"] != "basename"
                    ]
                if "returns" in module:
                    module["returns"] = [
                        i for i in module["returns"]
                        if i["name"] != "basename"
                    ]
            # openeo process names must not contain dots
            process = module["id"].replace('.', '_')
            actiniaid = module["id"]
            # Rewrite GRASS raster/strds parameters as openeo raster-cubes
            if "parameters" in module:
                for item in module["parameters"]:
                    if "subtype" in item["schema"]:
                        if item["schema"]["subtype"] in ("cell", "strds"):
                            item["schema"]["type"] = "object"
                            item["schema"]["subtype"] = "raster-cube"
            if "returns" in module:
                for item in module["returns"]:
                    if "subtype" in item["schema"]:
                        if item["schema"]["subtype"] in ("cell", "strds"):
                            item["schema"]["type"] = "object"
                            item["schema"]["subtype"] = "raster-cube"
            # create "pseudo" modules which comply to openeo
            if ('returns' in module and type(module['returns']) is list
                    and len(module['returns']) > 0):
                # create "pseudo" module for every output:
                for returns in module['returns']:
                    # NOTE(review): dict(module) is a SHALLOW copy -- the
                    # pseudo modules share nested dicts/lists with the
                    # original module object; confirm this aliasing is
                    # intentional.
                    pm = dict(module)
                    pm["links"] = dict()
                    pm["links"]["about"] = (
                        "https://grass.osgeo.org/grass80/manuals/%s.html"
                        % pm["id"])
                    pm['returns'] = returns
                    # process name is "<module>_<output-name>"
                    process = "%s_%s" % (pm['id'].replace(
                        '.', '_'), returns['name'])
                    pm['id'] = process
                    ACTINIA_OPENEO_PROCESS_DESCRIPTION_DICT[process] = pm
                    OPENEO_ACTINIA_ID_DICT[process] = {
                        "id": actiniaid,
                        "returns": returns["name"]
                    }
            else:
                # if no output, assign empty object
                module["links"] = dict()
                module["links"]["about"] = (
                    "https://grass.osgeo.org/grass80/manuals/%s.html"
                    % module["id"])
                module['returns'] = {}
                module["id"] = process
                OPENEO_ACTINIA_ID_DICT[process] = {"id": actiniaid}
                ACTINIA_OPENEO_PROCESS_DESCRIPTION_DICT[process] = module
        # TODO: add logger
        print("... successfully registered modules!")
    else:
        print('... error registering modules!')
class JobsJobId(ResourceBase):
    """The /jobs/{job_id} endpoint implementation."""

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.db = GraphDB()
        self.job_db = JobDB()
        # Bugfix: get() and delete() read self.actinia_job_db, but it was
        # never initialized in this class (the sibling resources create
        # ActiniaJobDB() after the same ResourceBase.__init__ call),
        # causing an AttributeError on first access.
        self.actinia_job_db = ActiniaJobDB()

    def get(self, job_id):
        """Return information about a single job

        https://api.openeo.org/#operation/describe-job
        """
        if job_id in self.job_db:
            job: JobInformation = self.job_db[job_id]
            # Reset the STAC item fields before (re)filling them below
            job.stac_version = CAPABILITIES['stac_version']
            job.type = "Feature"
            job.geometry = "json:null"
            job.properties = dict()
            job.properties['datetime'] = None
            job.assets = dict()
            job.links = []
            # Check for the actinia id to get the latest actinia job
            # information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.resource_info(
                    resource_id=actinia_id)
                if code == 200:
                    # Add the actinia information to the openeo job
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        # Normalize the actinia timestamp: 'T' separator,
                        # quotes stripped
                        job.updated = job_info["datetime"].replace(
                            " ", "T").replace("'", "").replace('"', '')
                        # Map the actinia resource status onto the openeo
                        # job status vocabulary
                        if job_info["status"] == "finished":
                            job.status = "finished"
                        if job_info["status"] == "error":
                            job.status = "error"
                        if job_info["status"] == "accepted":
                            job.status = "queued"
                        if job_info["status"] == "terminated":
                            job.status = "canceled"
                        if job_info["status"] == "running":
                            job.status = "running"
                        # Store the updated job in the database
                        self.job_db[job_id] = job
                else:
                    # Non-200 from actinia: still persist the raw response
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        self.job_db[job_id] = job
            # Expose the exported resource URLs as links on the job
            if (job.additional_info['urls']
                    and "resources" in job.additional_info['urls']):
                resource_links = job.additional_info['urls']['resources']
                if job.links is None:
                    job.links = []
                for link in resource_links:
                    eo_link = EoLink(href=link)
                    job.links.append(eo_link)
            return job.as_response(http_status=200)
        else:
            return ErrorSchema(
                id="123456678", code=404,
                message=f"job with id {job_id} not found in database."
            ).as_response(http_status=404)

    def patch(self, job_id):
        try:
            """Update a job in the job database"""
            # TODO: Implement user specific database access
            job = request.get_json()
            if job_id in self.job_db:
                if "process" not in job:
                    return ErrorSchema(
                        id=uuid4(),
                        message="A process is required in the job request"
                    ).as_response(http_status=400)
                job_info = check_job(job=job, job_id=job_id)
                self.job_db[job_id] = job_info
                return make_response(job_id, 204)
            else:
                return ErrorSchema(
                    id="123456678", code=404,
                    message=f"job with id {job_id} not found in database."
                ).as_response(http_status=404)
        except Exception:
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            return ErrorSchema(
                id="1234567890", code=2,
                message=str(traceback_model)).as_response(http_status=400)

    def delete(self, job_id):
        """Delete a single job

        https://open-eo.github.io/openeo-api/v/0.3.0/apireference/#tag/Job-Management/paths/~1jobs~1{job_id}/delete
        """
        if job_id in self.job_db:
            # Terminate the backing actinia resource if one is tracked
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.delete_resource(
                    resource_id=actinia_id)
            del self.job_db[job_id]
            return make_response("The job has been successfully deleted",
                                 204)
        else:
            return ErrorSchema(
                id="123456678", code=404,
                message=f"job with id {job_id} not found in database."
            ).as_response(http_status=404)
class JobsJobIdLogs(ResourceBase):
    # The /jobs/{job_id}/logs endpoint implementation.

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        # really use ActiniaConfig user + pw ?
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.db = GraphDB()
        self.job_db = JobDB()
        # Maps openeo job ids to actinia resource ids
        self.actinia_job_db = ActiniaJobDB()

    def get(self, job_id):
        """Return information about a single job

        https://api.openeo.org/#operation/debug-job
        """
        if job_id in self.job_db:
            job: JobInformation = self.job_db[job_id]
            job_logs = {'logs': [], 'links': []}
            # Check for the actinia id to get the latest actinia job
            # information
            if job_id in self.actinia_job_db:
                actinia_id = self.actinia_job_db[job_id]
                code, job_info = self.iface.resource_info(
                    resource_id=actinia_id)
                if code == 200:
                    # Add the actinia information to the openeo job
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        job.updated = job_info["datetime"]
                        # Map the actinia resource status onto the openeo
                        # job status vocabulary
                        if job_info["status"] == "finished":
                            job.status = "finished"
                        if job_info["status"] == "error":
                            job.status = "error"
                        if job_info["status"] == "accepted":
                            job.status = "queued"
                        if job_info["status"] == "terminated":
                            job.status = "canceled"
                        if job_info["status"] == "running":
                            job.status = "running"
                        # Store the updated job in the database
                        self.job_db[job_id] = job
                else:
                    # Non-200 from actinia: still persist the raw response
                    if job.additional_info != job_info:
                        job.additional_info = job_info
                        self.job_db[job_id] = job
            # Collect the exported resource URLs as links.
            # NOTE(review): job.additional_info may be None for a job that
            # was never started -- confirm before relying on this path.
            links = []
            if (job.additional_info['urls']
                    and "resources" in job.additional_info['urls']):
                resource_links = job.additional_info['urls']['resources']
                for link in resource_links:
                    eo_link = EoLink(href=link)
                    links.append(eo_link)
            # NOTE(review): the whole JobInformation object is placed under
            # 'logs' and passed through jsonify -- confirm JobInformation
            # is JSON-serializable; a list of log entries may have been
            # intended here.
            job_logs['logs'] = job
            job_logs['links'] = links
            return make_response(jsonify(job_logs), 200)
        else:
            return ErrorSchema(
                id="123456678", code=404,
                message=f"job with id {job_id} not found in database.").as_response(
                    http_status=404)
class Preview(ResourceBase):
    """Legacy /preview endpoint: run a job synchronously with a time limit."""

    def __init__(self):
        self.iface = ActiniaInterface()
        # Authenticate with the credentials of the current HTTP request
        self.iface.set_auth(request.authorization.username,
                            request.authorization.password)
        self.db = GraphDB()

    def post(self):
        """Run the job in an ephemeral mapset synchronously for 10 seconds.

        After 10 seconds the running job will be killed on the actinia
        server and the response will be an termination report.
        """
        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}
            request_doc = request.get_json()
            process_graph = request_doc["process_graph"]
            # Transform the process graph into a process chain and store
            # the input location
            # Check all locations in the process graph
            result_name, process_list = analyse_process_graph(process_graph)
            if len(ActiniaInterface.PROCESS_LOCATION) == 0 or len(
                    ActiniaInterface.PROCESS_LOCATION) > 1:
                # Bugfix: the 400 status code was previously passed INTO
                # jsonify(), so it was serialized into the JSON body and
                # the response went out as HTTP 200. Pass it to
                # make_response() so the client actually receives a 400.
                return make_response(
                    jsonify({
                        "description":
                        "Processes can only be defined for a single location!"
                    }), 400)
            location = ActiniaInterface.PROCESS_LOCATION.keys()
            location = list(location)[0]
            process_chain = dict(list=process_list, version="1")
            # pprint.pprint(process_chain)
            status, response = self.iface.async_ephemeral_processing_export(
                location=location, process_chain=process_chain)
            status, response = self.wait_until_finished(response=response,
                                                        max_time=10)
            if status == 200:
                return make_response(
                    jsonify({
                        "job_id": response["resource_id"],
                        "job_info": response
                    }), status)
            else:
                error = ErrorSchema(id="1234567890", code=1,
                                    message=str(response),
                                    links=response["urls"]["status"])
                return make_response(error.to_json(), status)
        except Exception:
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            error = ErrorSchema(id="1234567890", code=2,
                                message=str(traceback_model))
            return make_response(error.to_json(), 400)

    def wait_until_finished(self, response, max_time: int = 10):
        """Poll the status of a resource and assert its finished HTTP status

        The response will be checked if the resource was accepted. Hence it
        must always be HTTP 200 status.

        The status URL from the response is then polled until status:
        finished, error or terminated. The result of the poll can be
        checked against its HTTP status and its GRaaS status message.

        Args:
            response: The accept response
            max_time (int): The maximum time to wait, until the job gets
                            killed

        Returns:
            response
        """
        # Check if the resource was accepted
        resource_id = response["resource_id"]
        start_time = time.time()
        while True:
            status, resp_data = self.iface.resource_info(resource_id)
            if isinstance(resp_data, dict) is False \
                    or "status" not in resp_data:
                raise Exception("wrong return values %s" % str(resp_data))
            if resp_data["status"] == "finished" or \
                    resp_data["status"] == "error" or \
                    resp_data["status"] == "terminated":
                break
            time.sleep(1)
            current_time = time.time()
            if current_time - start_time > max_time:
                # Timeout: terminate the resource; the next poll is then
                # expected to report "terminated" and end the loop
                status_code, data = self.iface.delete_resource(
                    resource_id=resource_id)
                if status_code != 200:
                    raise Exception(
                        f"Unable to terminate job, error: {data}")
        return status, resp_data