def get_process_list(process):
    """Analyse the process description and return the Actinia process chain
    and the name of the processing result

    :param process: The process description arguments
    :return: (output_names, actinia_process_list)
    """
    output_names = []
    process_list = []

    # Both band parameters are mandatory
    if "red" not in process:
        raise Exception("Process %s requires parameter <red>" % PROCESS_NAME)
    if "nir" not in process:
        raise Exception("Process %s requires parameter <nir>" % PROCESS_NAME)

    # Get the red and nir data separately as two sub-graphs
    red_process = dict(myproc="myproc", red=process["red"])
    # NOTE(review): the nir sub-graph is also attached under the key "red";
    # analyse_process_graph() appears to look only at the dict values,
    # but confirm this key reuse is intentional.
    nir_process = dict(myproc="myproc", red=process["nir"])

    red_input_names, red_process_list = analyse_process_graph(red_process)
    process_list.extend(red_process_list)
    nir_input_names, nir_process_list = analyse_process_graph(nir_process)
    process_list.extend(nir_process_list)

    if not red_input_names:
        raise Exception("Process %s requires an input strds for band <red>" % PROCESS_NAME)
    if not nir_input_names:
        raise Exception("Process %s requires an input strds for band <nir>" % PROCESS_NAME)

    # The last entry of each sub-graph result is the strds that feeds
    # this process (fixed local-name typo: red_stds -> red_strds)
    red_strds = red_input_names[-1]
    nir_strds = nir_input_names[-1]

    # Pipe all intermediate results of the red sub-graph to the output
    if len(red_input_names) > 1:
        output_names.extend(red_input_names[0:-1])
    # Pipe all intermediate results of the nir sub-graph to the output
    if len(nir_input_names) > 1:
        output_names.extend(nir_input_names[0:-1])

    location, mapset, datatype, layer_name = ActiniaInterface.layer_def_to_components(
        red_strds)
    output_name = "%s_%s" % (layer_name, PROCESS_NAME)
    output_names.append(output_name)

    pc = create_process_chain_entry(nir_strds, red_strds, output_name)
    process_list.extend(pc)

    return output_names, process_list
def test_get_data_3(self):
    """The GET_DATA_3 graph must translate into exactly one process entry."""
    names, chain = analyse_process_graph(graph=GET_DATA_3)
    pprint(names)
    pprint(chain)
    self.assertEqual(len(chain), 1)
def test_reduce_time_min(self):
    """Reducing over time with min must yield a two-entry process chain."""
    result_names, chain = analyse_process_graph(graph=REDUCE_TIME_MIN)
    pprint(result_names)
    pprint(chain)
    self.assertEqual(len(chain), 2)
def get_process_list(process):
    """Analyse the process description and return the Actinia process chain
    and the name of the processing result layer which is a single raster layer

    :param process: The process description arguments
    :return: (output_names, actinia_process_list)
    :raises Exception: If the required parameter <method> is missing
    """
    # Validate the required parameter up front, before the (potentially
    # expensive) recursive graph analysis is performed
    if "method" not in process:
        raise Exception("Parameter method is required.")

    input_names, process_list = analyse_process_graph(process)
    output_names = []

    for input_name in input_names:
        location, mapset, datatype, layer_name = ActiniaInterface.layer_def_to_components(
            input_name)
        # The result layer is named after the input layer and this process
        output_name = "%s_%s" % (layer_name, PROCESS_NAME)
        output_names.append(output_name)

        pc = create_process_chain_entry(input_name, process["method"],
                                        output_name)
        process_list.append(pc)

    return output_names, process_list
def get_process_list(process):
    """Analyse the process description and return the Actinia process chain
    and the name of the processing result

    :param process: The process description
    :return: (output_names, actinia_process_list)
    """
    input_names, process_list = analyse_process_graph(process)
    output_names = []

    # The data entry must be present and is analysed first
    if "data_id" not in process:
        raise Exception("Process %s requires parameter <data_id>" % PROCESS_NAME)

    data_id = process["data_id"]
    output_names.append(data_id)

    entry = create_process_chain_entry(input_name=data_id)
    process_list.append(entry)

    # Pipe every incoming input unchanged to the output
    output_names.extend(input_names)

    return output_names, process_list
def send_actinia_processing_request(self, job: JobInformation):
    """Translate the job's process graph into an Actinia process chain and
    submit it for asynchronous ephemeral export processing.

    :param job: The job information holding the process graph
    :return: (status, response) from the Actinia interface
    :raises Exception: Wrapping any failure with a traceback dictionary
    """
    try:
        # Start from a clean process-location store
        ActiniaInterface.PROCESS_LOCATION = {}

        # Translating the graph also records every referenced input
        # location in ActiniaInterface.PROCESS_LOCATION
        result_name, process_list = analyse_process_graph(
            {"process_graph": job.process_graph})

        # Exactly one location must be referenced by the whole graph
        if len(ActiniaInterface.PROCESS_LOCATION) != 1:
            raise Exception(
                "Processes can only be defined for a single location!")

        location = list(ActiniaInterface.PROCESS_LOCATION.keys())[0]

        process_chain = dict(list=process_list, version="1")
        # pprint.pprint(process_chain)

        status, response = self.iface.async_ephemeral_processing_export(
            location=location, process_chain=process_chain)
        return status, response
    except Exception:
        e_type, e_value, e_tb = sys.exc_info()
        traceback_model = dict(message=str(e_value),
                               traceback=traceback.format_tb(e_tb),
                               type=str(e_type))
        raise Exception(str(traceback_model))
def test_openeo_usecase_1a(self):
    """openEO use case 1a must translate into an eight-entry process chain."""
    result_names, chain = analyse_process_graph(graph=OPENEO_USECASE_1A)
    pprint(result_names)
    pprint(chain)
    self.assertEqual(len(chain), 8)
def get_process_list(args):
    """Analyse the process description and return the Actinia process chain
    and the name of the processing result layer which is a single raster layer

    :param args: The process description
    :return: (output_names, actinia_process_list)
    :raises Exception: If the required parameter <python_file_url> is missing
    """
    # Get the input description and the process chain to attach this process
    input_names, process_list = analyse_process_graph(args)
    output_names = []

    # The UDF source is a required parameter. Check it once, before the
    # per-input loop: the original checked inside the loop, so a missing
    # file was silently ignored whenever there were no inputs, and the
    # loop-invariant lookup was repeated per iteration.
    if "python_file_url" not in args:
        raise Exception("Python file is missing in the process description")
    python_file_url = args["python_file_url"]

    for input_name in input_names:
        location, mapset, datatype, layer_name = \
            ActiniaInterface.layer_def_to_components(input_name)
        # The result layer is named after the input layer and this process
        output_name = "%s_%s" % (layer_name, PROCESS_NAME)
        output_names.append(output_name)

        pc = create_process_chain_entry(input_name=input_name,
                                        python_file_url=python_file_url,
                                        output_name=output_name)
        process_list.append(pc)

    return output_names, process_list
def otest_openeo_usecase_2(self):
    """openEO use case 2 must translate into a six-entry process chain.

    Deliberately named with an 'o' prefix so the test runner skips it:
    disabled since UDF is not supported.
    """
    result_names, chain = analyse_process_graph(graph=OPENEO_USECASE_2)
    pprint(result_names)
    pprint(chain)
    self.assertEqual(len(chain), 6)
def test_ndvi_4(self):
    """The NDVI_4 graph must produce the S2A NDVI output and four entries."""
    result_names, chain = analyse_process_graph(graph=NDVI_4)
    pprint(result_names)
    pprint(chain)
    self.assertEqual(result_names[0], "S2A_B04_NDVI2")
    self.assertEqual(len(chain), 4)
def test_filter_bbox(self):
    """The bbox filter must append a g.region call as the second entry."""
    names, chain = analyse_process_graph(graph=FILTER_BOX)
    pprint(names)
    pprint(chain)
    self.assertEqual(len(chain), 2)
    self.assertTrue(chain[1]["module"] == "g.region")
def test_ndvi_3(self):
    """The NDVI_3 graph must produce the lsat5 NDVI output and four entries."""
    result_names, chain = analyse_process_graph(graph=NDVI_3)
    pprint(result_names)
    pprint(chain)
    self.assertEqual(result_names[0], "lsat5_red_NDVI2")
    self.assertEqual(len(chain), 4)
def post(self):
    """Run the job in an ephemeral mapset synchronously for 10 seconds.

    After 10 seconds the running job will be killed on the actinia server
    and the response will be a termination report.

    :return: A Flask response with the job id and info, or an ErrorSchema
    """
    try:
        # Empty the process location
        ActiniaInterface.PROCESS_LOCATION = {}

        request_doc = request.get_json()
        process_graph = request_doc["process_graph"]
        # Transform the process graph into a process chain and store the
        # input location. Check all locations in the process graph.
        result_name, process_list = analyse_process_graph(process_graph)

        if len(ActiniaInterface.PROCESS_LOCATION) == 0 or len(
                ActiniaInterface.PROCESS_LOCATION) > 1:
            # BUG FIX: the status code must be the second argument of
            # make_response(), not of jsonify(); the original call
            # embedded 400 in the body and returned HTTP 200.
            return make_response(
                jsonify({
                    "description":
                        "Processes can only be defined for a single location!"
                }), 400)

        location = ActiniaInterface.PROCESS_LOCATION.keys()
        location = list(location)[0]

        process_chain = dict(list=process_list, version="1")
        # pprint.pprint(process_chain)

        status, response = self.iface.async_ephemeral_processing_export(
            location=location, process_chain=process_chain)
        status, response = self.wait_until_finished(response=response,
                                                    max_time=10)

        if status == 200:
            return make_response(
                jsonify({
                    "job_id": response["resource_id"],
                    "job_info": response
                }), status)
        else:
            error = ErrorSchema(id="1234567890", code=1,
                                message=str(response),
                                links=response["urls"]["status"])
            return make_response(error.to_json(), status)
    except Exception:
        e_type, e_value, e_tb = sys.exc_info()
        traceback_model = dict(message=str(e_value),
                               traceback=traceback.format_tb(e_tb),
                               type=str(e_type))
        error = ErrorSchema(id="1234567890", code=2,
                            message=str(traceback_model))
        return make_response(error.to_json(), 400)
def test_raster_export(self):
    """Both exported raster layers must appear first in the output names."""
    result_names, chain = analyse_process_graph(graph=RASTER_EXPORT)
    pprint(result_names)
    pprint(chain)
    expected = ("nc_spm_08.PERMANENT.raster.elevation",
                "nc_spm_08.PERMANENT.raster.slope")
    self.assertEqual(result_names[0], expected[0])
    self.assertEqual(result_names[1], expected[1])
    self.assertEqual(len(chain), 4)
def test_daterange(self):
    """The date range filter must append a t.rast.extract call second."""
    names, chain = analyse_process_graph(graph=DATERANGE)
    pprint(names)
    pprint(chain)
    self.assertEqual(len(chain), 2)
    self.assertTrue(chain[1]["module"] == "t.rast.extract")
def test_ndvi_error(self):
    """An invalid NDVI graph must make analyse_process_graph() raise.

    BUG FIX: the original used try/except with a bare ``except: pass``
    around ``self.assertTrue(False)``; the bare except also swallowed the
    AssertionError, so the test passed even when no exception was raised.
    assertRaises() fails correctly when the expected exception is missing.
    """
    with self.assertRaises(Exception):
        names, pc = analyse_process_graph(graph=NDVI_ERROR)
        pprint(names)
        pprint(pc)
def test_zonal_statistics(self):
    """Zonal statistics must expose both input strds and 16 chain entries."""
    result_names, chain = analyse_process_graph(graph=ZONAL_STATISTICS)
    pprint(result_names)
    pprint(chain)
    self.assertEqual(
        result_names[0], "latlong_wgs84.modis_ndvi_global.strds.ndvi_16_5600m")
    self.assertEqual(result_names[1], "latlong_wgs84.asia_gdd_2017.strds.gdd")
    self.assertEqual(len(chain), 16)
def get_process_list(process):
    """Analyse the process description and return the Actinia process chain
    and the name of the processing result

    :param process: The process description arguments
    :return: (output_names, actinia_process_list)
    """
    output_names = []

    # Both band parameters are mandatory
    if "red" not in process:
        raise Exception("Process %s requires parameter <red>" % PROCESS_NAME)
    if "nir" not in process:
        raise Exception("Process %s requires parameter <nir>" % PROCESS_NAME)

    red_strds = None
    nir_strds = None

    input_names, process_list = analyse_process_graph(process)

    # Pick the red and nir strds out of the inputs by substring match;
    # anything else is piped through to the output unchanged
    for name in input_names:
        if process["red"] in name:
            red_strds = name
        elif process["nir"] in name:
            nir_strds = name
        else:
            output_names.append(name)

    if not red_strds:
        raise Exception("Process %s requires an input strds for band <red>" % PROCESS_NAME)
    if not nir_strds:
        raise Exception("Process %s requires an input strds for band <nir>" % PROCESS_NAME)

    location, mapset, datatype, layer_name = \
        ActiniaInterface.layer_def_to_components(red_strds)
    output_name = "%s_%s" % (layer_name, PROCESS_NAME)
    output_names.append(output_name)

    process_list.extend(
        create_process_chain_entry(nir_strds, red_strds, output_name))

    return output_names, process_list
def post(self):
    """Validate the posted process graph against an ephemeral mapset

    :return: An empty 204 response on success, an ErrorSchema response
             otherwise
    """
    try:
        # Start from a clean process-location store
        ActiniaInterface.PROCESS_LOCATION = {}

        process_graph = request.get_json()
        # Translating the graph also records every referenced input
        # location in ActiniaInterface.PROCESS_LOCATION
        result_name, process_list = analyse_process_graph(process_graph)

        # Exactly one location must be referenced by the whole graph
        if len(ActiniaInterface.PROCESS_LOCATION) != 1:
            msg = "Processes can only be defined for a single location!"
            status = 400
            es = ErrorSchema(id=str(datetime.now()), code=status,
                             message=str(msg))
            return make_response(es.to_json(), status)

        location = list(ActiniaInterface.PROCESS_LOCATION.keys())[0]

        process_chain = dict(list=process_list, version="1")
        pprint(process_chain)

        status, response = self.iface.sync_ephemeral_processing_validation(
            location=location, process_chain=process_chain)
        pprint(response)

        if status == 200:
            return make_response("", 204)

        es = ErrorSchema(id=str(datetime.now()), code=status,
                         message=str(response))
        return make_response(es.to_json(), status)
    except Exception as e:
        es = ErrorSchema(id=str(datetime.now()), code=400, message=str(e))
        return make_response(es.to_json(), 400)
def get_process_list(process):
    """Analyse the process description and return the Actinia process chain
    and the name of the processing result

    :param process: The process description
    :return: (output_names, actinia_process_list)
    """
    input_names, process_list = analyse_process_graph(process)
    output_names = []

    if "spatial_extent" not in process.keys():
        raise Exception("Process %s requires parameter <spatial_extent>" % PROCESS_NAME)

    extent = process["spatial_extent"]
    # Every boundary and resolution entry must be present
    required = ("left", "right", "top", "bottom", "width_res", "height_res")
    if any(key not in extent for key in required):
        raise Exception("Process %s requires parameter left, right, top, bottom, "
                        "width_res, height_res" % PROCESS_NAME)

    entry = create_process_chain_entry(left=extent["left"],
                                       right=extent["right"],
                                       top=extent["top"],
                                       bottom=extent["bottom"],
                                       width_res=extent["width_res"],
                                       height_res=extent["height_res"])
    process_list.append(entry)

    # Pipe the inputs unchanged to the output
    output_names.extend(input_names)

    return output_names, process_list
def get_process_list(process):
    """Analyse the process description and return the Actinia process chain
    and the name of the processing result strds that was filtered by start
    and end date

    :param process: The process description
    :return: (output_names, actinia_process_list)
    """
    # Get the input description and the process chain to attach this process
    input_names, process_list = analyse_process_graph(process)
    output_names = []

    # The temporal bounds are loop-invariant; the original re-initialized
    # and re-extracted them on every iteration. Read them once up front.
    start_time = process.get("from")
    end_time = process.get("to")

    for input_name in input_names:
        location, mapset, datatype, layer_name = \
            ActiniaInterface.layer_def_to_components(input_name)

        # Skip non-strds inputs and pipe them unchanged to the output
        if datatype and datatype != "strds":
            output_names.append(input_name)
            continue

        output_name = "%s_%s" % (layer_name, PROCESS_NAME)
        output_names.append(output_name)

        # NOTE(review): the double underscore in create__process_chain_entry
        # matches the helper's definition elsewhere in this file — keep the
        # spellings in sync if the helper is ever renamed
        pc = create__process_chain_entry(input_name=input_name,
                                         start_time=start_time,
                                         end_time=end_time,
                                         output_name=output_name)
        process_list.append(pc)

    return output_names, process_list