class JobsJobIdEstimate(ResourceBase):
    """The /jobs/{job_id}/estimate endpoint implementation."""

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        # really use ActiniaConfig user + pw ?
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.db = GraphDB()
        self.job_db = JobDB()
        self.actinia_job_db = ActiniaJobDB()

    def get(self, job_id):
        """Return information about a single job

        https://api.openeo.org/#operation/estimate-job
        """
        # TODO
        # at least one of costs, duration, size must be specified
        # optional: downloads_included, expires
        if job_id not in self.job_db:
            return ErrorSchema(
                id="123456678",
                code=404,
                message=f"job with id {job_id} not found in database."
            ).as_response(http_status=404)
        return make_response(jsonify({"costs": 0}), 200)
def create_process_chain_entry(nir_time_series, red_time_series,
                               output_time_series):
    """Create a Actinia process description that uses t.rast.mapcalc and
    t.rast.colors to compute an NDVI time series.

    :param nir_time_series: The NIR band time series name
    :param red_time_series: The RED band time series name
    :param output_time_series: The name of the output time series
    :return: A list of Actinia process chain descriptions
    """
    nir_name = ActiniaInterface.layer_def_to_grass_map_name(nir_time_series)
    red_name = ActiniaInterface.layer_def_to_grass_map_name(red_time_series)
    output_name = ActiniaInterface.layer_def_to_grass_map_name(
        output_time_series)

    # Random suffix keeps the process ids unique within a chain
    suffix = randint(0, 1000000)
    maps = {"result": output_name, "nir": nir_name, "red": red_name}

    mapcalc_step = {
        "id": "t_rast_mapcalc_%i" % suffix,
        "module": "t.rast.mapcalc",
        "inputs": [
            {"param": "expression",
             "value": "%(result)s = float((%(nir)s - %(red)s)/"
                      "(%(nir)s + %(red)s))" % maps},
            {"param": "inputs",
             "value": "%(nir)s,%(red)s" % maps},
            {"param": "basename", "value": "ndvi"},
            {"param": "output", "value": output_name},
        ],
    }
    colors_step = {
        "id": "t_rast_color_%i" % suffix,
        "module": "t.rast.colors",
        "inputs": [
            {"param": "input", "value": output_name},
            {"param": "color", "value": "ndvi"},
        ],
    }
    return [mapcalc_step, colors_step]
def test_async_persistent_processing(self):
    """Start an asynchronous persistent processing job and poll its
    resource until the backend reports it as finished."""
    iface = ActiniaInterface(self.gconf)
    g_region_step = {"id": "g_region_1",
                     "module": "g.region",
                     "flags": "g"}
    process_chain = {"version": "1", "list": [g_region_step]}
    status, response = iface.async_persistent_processing(
        location="nc_spm_08",
        mapset="new_user_mapset",
        process_chain=process_chain)
    resource_id = response["resource_id"]
    print(status)
    print(resource_id)
    self.assertEqual(status, 200)

    # First poll, mainly for debugging output
    status, info = iface.resource_info(resource_id)
    print(status)
    print(info)

    # Give the small job time to complete before the final check
    time.sleep(2)
    status, info = iface.resource_info(resource_id)
    print(status)
    print(info)
    self.assertEqual(info["status"], "finished")
def __init__(self):
    """Set up the Actinia interface with the credentials taken from the
    current request and open the database handles."""
    auth = request.authorization
    self.iface = ActiniaInterface()
    self.iface.set_auth(auth.username, auth.password)
    self.db = GraphDB()
    self.job_db = JobDB()
    self.actinia_job_db = ActiniaJobDB()
class ProcessGraphs(ResourceBase):
    """The /process_graphs endpoint implementation"""

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.graph_db = GraphDB()

    def get(self):
        """Return all jobs in the job database"""
        # TODO: Implement user specific database access
        process_graphs = []
        for key in self.graph_db:
            graph = self.graph_db[key]
            # "title" and "description" are optional in a stored graph;
            # dict.get returns None when they are absent
            entry = ProcessGraphListEntry(title=graph.get("title"),
                                          description=graph.get("description"),
                                          id=key)
            process_graphs.append(entry)
        return ProcessGraphList(process_graphs=process_graphs).as_response(
            http_status=200)

    # no longer supported, replaced by ProcessGraphId
    def post(self):
        """Store a process graph in the graph database"""
        # TODO: Implement user specific database access
        try:
            process_graph_id = f"user-graph-{str(uuid4())}"
            process_graph = request.get_json()
            self.graph_db[process_graph_id] = process_graph
            return make_response(process_graph_id, 201)
        except Exception:
            # Report any failure as a 400 with the captured traceback
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            return ErrorSchema(
                id="1234567890",
                code=2,
                message=str(traceback_model)).as_response(http_status=400)

    def delete(self):
        """Clear the process graph database"""
        self.graph_db.clear()
        return make_response(
            "All process graphs have been successfully deleted", 204)
def __init__(self):
    """Initialize the resource with a pre-authenticated Actinia
    interface and the database handles."""
    ResourceBase.__init__(self)
    self.iface = ActiniaInterface()
    # really use ActiniaConfig user + pw ?
    self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
    self.db = GraphDB()
    self.job_db = JobDB()
    self.actinia_job_db = ActiniaJobDB()
def test_list_vector(self):
    """Check that listing the vector layers in PERMANENT succeeds."""
    interface = ActiniaInterface(self.gconf)
    status, vector_layers = interface.list_vector(
        location="nc_spm_08", mapset="PERMANENT")
    pprint(vector_layers)
    self.assertEqual(status, 200)
    # nc_spm_08/PERMANENT is expected to contain 46 vector maps
    self.assertEqual(len(vector_layers), 46)
def ok_user_and_password(username, password):
    """Check the given credentials against the Actinia service.

    :param username: The user name to authenticate with
    :param password: The password to authenticate with
    :return: True if Actinia accepts the credentials, False otherwise
    """
    iface = ActiniaInterface()
    iface.set_auth(username, password)
    # Listing the locations serves as a cheap authenticated probe request
    status_code, locations = iface.list_locations()
    return status_code == 200
def test_list_strds(self):
    """Check that the strds listing for the global NDVI mapset works."""
    interface = ActiniaInterface(self.gconf)
    status, strds_layers = interface.list_strds(
        location="latlong_wgs84", mapset="modis_ndvi_global")
    pprint(strds_layers)
    self.assertEqual(status, 200)
    # Exactly one strds is expected in this mapset
    self.assertEqual(len(strds_layers), 1)
def test_mapset_info(self):
    """Check that the mapset info contains region and projection data."""
    interface = ActiniaInterface(self.gconf)
    status, mapset_info = interface.mapset_info(
        location="latlong_wgs84", mapset="modis_ndvi_global")
    pprint(mapset_info)
    self.assertEqual(status, 200)
    self.assertTrue("region" in mapset_info)
    self.assertTrue("projection" in mapset_info)
class GraphValidation(ResourceBase):
    """The /validation endpoint: validate an openEO process graph
    against the Actinia backend without executing it."""

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)

    def post(self):
        """Run the job in an ephemeral mapset

        :return:
        """
        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}

            process_graph = request.get_json()
            g = Graph(graph_description=process_graph)
            result_name, process_list = g.to_actinia_process_list()

            # Exactly one location must have been collected while the
            # graph was translated
            if len(ActiniaInterface.PROCESS_LOCATION) != 1:
                msg = "Processes can only be defined for a single location!"
                status = 400
                es = ErrorSchema(id=str(datetime.now().isoformat()),
                                 code=status,
                                 message=str(msg))
                return make_response(es.to_json(), status)

            location = list(ActiniaInterface.PROCESS_LOCATION.keys())[0]

            process_chain = dict(list=process_list, version="1")
            status, response = self.iface.sync_ephemeral_processing_validation(
                location=location, process_chain=process_chain)

            if status == 200:
                errors = {"errors": []}
                return make_response(errors, 200)
            return ErrorSchema(
                id=str(datetime.now().isoformat()),
                code=status,
                message=str(response)).as_response(http_status=status)
        except Exception as e:
            return ErrorSchema(id=str(datetime.now().isoformat()),
                               code=400,
                               message=str(e)).as_response(http_status=400)
def test_strds_info(self):
    """Check the metadata keys reported for a known strds layer."""
    interface = ActiniaInterface(self.gconf)
    status, info = interface.layer_info(
        layer_name="latlong_wgs84.modis_ndvi_global.strds.ndvi_16_5600m")
    pprint(info)
    self.assertEqual(status, 200)
    # All of these keys must be present in the strds metadata
    expected_keys = ("temporal_type", "aggregation_type", "creation_time",
                     "creator", "granularity", "modification_time",
                     "number_of_maps")
    for key in expected_keys:
        self.assertTrue(key in info)
def disfunc_test_mapset_creation_deletion(self):
    """Create and then delete a user mapset (currently disabled test —
    note the `disfunc_` prefix keeps pytest from collecting it)."""
    interface = ActiniaInterface(self.gconf)

    status, response = interface.create_mapset(location="nc_spm_08",
                                               mapset="new_mapset")
    print(status)
    self.assertEqual(status, 200)
    print(response)

    status, response = interface.delete_mapset(location="nc_spm_08",
                                               mapset="new_mapset")
    print(status)
    self.assertEqual(status, 200)
    print(response)
class GraphValidation(ResourceBase):
    """Validate an openEO process graph by translating it to an Actinia
    process chain and asking the backend for a validation run."""

    def __init__(self):
        auth = request.authorization
        self.iface = ActiniaInterface()
        self.iface.set_auth(auth.username, auth.password)

    def post(self):
        """Run the job in an ephemeral mapset

        :return:
        """
        try:
            # Empty the process location
            ActiniaInterface.PROCESS_LOCATION = {}

            process_graph = request.get_json()
            # Transform the process graph into a process chain and store
            # the input location; check all locations in the process graph
            result_name, process_list = analyse_process_graph(process_graph)

            num_locations = len(ActiniaInterface.PROCESS_LOCATION)
            if num_locations == 0 or num_locations > 1:
                msg = "Processes can only be defined for a single location!"
                status = 400
                es = ErrorSchema(id=str(datetime.now()), code=status,
                                 message=str(msg))
                return make_response(es.to_json(), status)

            location = list(ActiniaInterface.PROCESS_LOCATION.keys())[0]

            process_chain = dict(list=process_list, version="1")
            pprint(process_chain)
            status, response = self.iface.sync_ephemeral_processing_validation(
                location=location, process_chain=process_chain)
            pprint(response)

            if status == 200:
                return make_response("", 204)
            es = ErrorSchema(id=str(datetime.now()), code=status,
                             message=str(response))
            return make_response(es.to_json(), status)
        except Exception as e:
            es = ErrorSchema(id=str(datetime.now()), code=400,
                             message=str(e))
            return make_response(es.to_json(), 400)
class Jobs(ResourceBase):
    """The /jobs endpoint implementation"""

    def __init__(self):
        ResourceBase.__init__(self)
        self.iface = ActiniaInterface()
        self.iface.set_auth(ActiniaConfig.USER, ActiniaConfig.PASSWORD)
        self.graph_db = GraphDB()
        self.job_db = JobDB()

    def get(self):
        """Return all jobs in the job database"""
        # TODO: Implement user specific database access
        jobs = []
        for job_id in self.job_db:
            entry = self.job_db[job_id]
            # The process description is stripped from listing entries
            entry.process = None
            jobs.append(entry)
        return JobList(jobs=jobs, links=[]).as_response(http_status=200)

    def post(self):
        """Submit a new job to the job database"""
        # TODO: Implement user specific database access
        job_id = f"user-job-{str(uuid4())}"
        # job_id = str(uuid4())
        job = request.get_json()

        # return ErrorSchema(id=uuid4(), message="A process graph is required
        # in the request").as_response(400)

        self.job_db[job_id] = check_job(job=job, job_id=job_id)

        response = make_response(job_id, 201)
        # add openeo-identifier
        response.headers["OpenEO-Identifier"] = job_id
        # add location, e.g. "https://openeo.org/api/v1.0/resource/<job_id>"
        response.headers["Location"] = ("%s/%s") % (url_for(".jobs"), job_id)
        return response

    def delete(self):
        """Clear the job database"""
        self.job_db.clear()
        return make_response("All jobs has been successfully deleted", 204)
class ProcessGraphs(ResourceBase):
    """The /jobs endpoint implementation"""

    def __init__(self):
        self.iface = ActiniaInterface()
        self.iface.set_auth(request.authorization.username,
                            request.authorization.password)
        self.graph_db = GraphDB()

    def get(self):
        """Return all jobs in the job database"""
        # TODO: Implement user specific database access
        process_graphs = []
        for key in self.graph_db:
            graph = self.graph_db[key]
            # "title" and "description" are optional in a stored graph;
            # direct indexing raised KeyError (-> 500) when they were
            # missing, so fall back to None instead
            entry = ProcessGraphListEntry(
                title=graph.get("title"),
                description=graph.get("description"),
                process_graph_id=key)
            process_graphs.append(entry)
        return make_response(
            ProcessGraphList(process_graphs=process_graphs).to_json(), 200)

    def post(self):
        """Store a process graph in the graph database"""
        # TODO: Implement user specific database access
        try:
            process_graph_id = f"user-graph::{str(uuid4())}"
            process_graph = request.get_json()
            self.graph_db[process_graph_id] = process_graph
            return make_response(process_graph_id, 201)
        except Exception:
            # Report any failure as a 400 with the captured traceback
            e_type, e_value, e_tb = sys.exc_info()
            traceback_model = dict(message=str(e_value),
                                   traceback=traceback.format_tb(e_tb),
                                   type=str(e_type))
            error = ErrorSchema(id="1234567890", code=2,
                                message=str(traceback_model))
            return make_response(error.to_json(), 400)

    def delete(self):
        """Clear the process graph database"""
        self.graph_db.clear()
        return make_response(
            "All process graphs have been successfully deleted", 204)
def create_process_chain_entry(input_name, method, output_name):
    """Create a Actinia process description that uses t.rast.series to
    reduce a time series.

    :param input_name: The input time series name
    :param method: The method for time reduction
    :param output_name: The name of the output map
    :return: A Actinia process chain description
    """
    grass_name = ActiniaInterface.layer_def_to_grass_map_name(input_name)
    process_id = "t_rast_series_%i" % randint(0, 1000000)
    return {
        "id": process_id,
        "module": "t.rast.series",
        "inputs": [
            {"param": "input", "value": grass_name},
            {"param": "method", "value": method},
            {"param": "output", "value": output_name},
        ],
        "flags": "t",
    }
def get_process_list(args):
    """Analyse the process description and return the Actinia process
    chain and the name of the processing result layer which is a
    single raster layer

    :param args: The process description
    :return: (output_names, actinia_process_list)
    """
    # Get the input description and the process chain to attach this process
    input_names, process_list = process_node_to_actinia_process_chain(args)
    output_names = []

    for input_name in input_names:
        location, mapset, datatype, layer_name = \
            ActiniaInterface.layer_def_to_components(input_name)
        output_name = create_output_name(layer_name, PROCESS_NAME)
        output_names.append(output_name)

        # The UDF source is mandatory for this process
        if "python_file_url" not in args:
            raise Exception(
                "Python file is missing in the process description")
        python_file_url = args["python_file_url"]

        process_list.append(
            create_process_chain_entry(input_name=input_name,
                                       python_file_url=python_file_url,
                                       output_name=output_name))

    return output_names, process_list
def get_process_list(process):
    """Analyse the process description and return the Actinia process
    chain and the name of the processing result layer which is a
    single raster layer

    :param process: The process description arguments
    :return: (output_names, actinia_process_list)
    """
    input_names, process_list = analyse_process_graph(process)
    output_names = []

    if "method" not in process:
        raise Exception("Parameter method is required.")
    method = process["method"]

    for input_name in input_names:
        location, mapset, datatype, layer_name = \
            ActiniaInterface.layer_def_to_components(input_name)
        output_name = "%s_%s" % (layer_name, PROCESS_NAME)
        output_names.append(output_name)

        entry = create_process_chain_entry(input_name, method, output_name)
        process_list.append(entry)

    return output_names, process_list
def create_process_chain_entry(input_name, python_file_url, output_name):
    """Create a Actinia command of the process chain that uses
    t.rast.aggr_func to run a user defined function on a strds.

    :param input_name: The name of the strds
    :param python_file_url: The URL to the python file that defines the UDF
    :param output_name: The name of the output raster layer
    :return: A Actinia process chain description
    """
    location, mapset, datatype, layer_name = \
        ActiniaInterface.layer_def_to_components(input_name)
    # Qualify the layer with its mapset when one is given
    input_name = layer_name if mapset is None else layer_name + "@" + mapset

    return {
        "id": "t_rast_aggr_func",
        "module": "t.rast.aggr_func",
        "inputs": [
            {"import_descr": {"source": python_file_url,
                              "type": "file"},
             "param": "pyfile",
             "value": "$file::my_py_func"},
            {"param": "input", "value": input_name},
            {"param": "output", "value": output_name},
        ],
    }
class Jobs(ResourceBase):
    """The /jobs endpoint implementation"""

    def __init__(self):
        auth = request.authorization
        self.iface = ActiniaInterface()
        self.iface.set_auth(auth.username, auth.password)
        self.graph_db = GraphDB()
        self.job_db = JobDB()

    def get(self):
        """Return all jobs in the job database"""
        # TODO: Implement user specific database access
        jobs = []
        for job_id in self.job_db:
            entry = self.job_db[job_id]
            # Strip the process graph from listing entries
            entry.process_graph = None
            jobs.append(entry)
        return make_response(JobList(jobs=jobs).to_json(), 200)

    def post(self):
        """Submit a new job to the job database"""
        # TODO: Implement user specific database access
        job_id = f"user-job::{str(uuid4())}"
        job = request.get_json()

        if "process_graph" not in job:
            error = ErrorSchema(
                id=uuid4(),
                message="A process graph is required in the request")
            return make_response(error.to_json(), 400)

        self.job_db[job_id] = check_job(job=job, job_id=job_id)
        return make_response(job_id, 201)

    def delete(self):
        """Clear the job database"""
        self.job_db.clear()
        return make_response("All jobs has been successfully deleted", 204)
def get_process_list(process):
    """Analyse the process description and return the Actinia process
    chain and the name of the processing result

    :param process: The process description arguments
    :return: (output_names, actinia_process_list)
    """
    output_names = []
    process_list = []

    # First analyse the data entries
    if "red" not in process:
        raise Exception("Process %s requires parameter <red>" % PROCESS_NAME)
    if "nir" not in process:
        raise Exception("Process %s requires parameter <nir>" % PROCESS_NAME)

    # Get the red and ir data separately
    # NOTE(review): the nir data is deliberately passed under the "red"
    # key — presumably analyse_process_graph treats the key generically;
    # confirm before changing
    red_process = dict(myproc="myproc", red=process["red"])
    nir_process = dict(myproc="myproc", red=process["nir"])

    red_input_names, red_chain = analyse_process_graph(red_process)
    process_list.extend(red_chain)

    nir_input_names, nir_chain = analyse_process_graph(nir_process)
    process_list.extend(nir_chain)

    if not red_input_names:
        raise Exception("Process %s requires an input strds for band <red>"
                        % PROCESS_NAME)
    if not nir_input_names:
        raise Exception("Process %s requires an input strds for band <nir>"
                        % PROCESS_NAME)

    # The last entry of each branch is the strds to combine
    red_strds = red_input_names[-1]
    nir_strds = nir_input_names[-1]

    # All earlier entries are piped through to the output unchanged
    # (slicing a single-element list gives [] — no-op extend)
    output_names.extend(red_input_names[0:-1])
    output_names.extend(nir_input_names[0:-1])

    location, mapset, datatype, layer_name = \
        ActiniaInterface.layer_def_to_components(red_strds)
    output_name = "%s_%s" % (layer_name, PROCESS_NAME)
    output_names.append(output_name)

    process_list.extend(
        create_process_chain_entry(nir_strds, red_strds, output_name))

    return output_names, process_list
def create__process_chain_entry(input_name, start_time, end_time, output_name):
    """Create a Actinia command of the process chain that uses
    t.rast.extract to create a subset of a strds

    :param input_name: The name of the strds
    :param start_time: Lower bound of the requested interval
    :param end_time: Upper bound of the requested interval
    :param output_name: The name of the output strds
    :return: A Actinia process chain description
    """
    location, mapset, datatype, layer_name = \
        ActiniaInterface.layer_def_to_components(input_name)
    # Qualify the layer with its mapset when one is given
    input_name = layer_name if mapset is None else layer_name + "@" + mapset

    base_name = "%s_extract" % layer_name

    # Get info about the time series to extract its resolution settings
    # and bbox
    rn = randint(0, 1000000)

    where_clause = ("start_time >= '%(start)s' "
                    "AND end_time <= '%(end)s'" % {"start": start_time,
                                                   "end": end_time})
    return {
        "id": "t_rast_extract_%i" % rn,
        "module": "t.rast.extract",
        "inputs": [
            {"param": "input", "value": input_name},
            {"param": "where", "value": where_clause},
            {"param": "output", "value": output_name},
            {"param": "expression", "value": "1.0 * %s" % input_name},
            {"param": "basename", "value": base_name},
            {"param": "suffix", "value": "num"},
        ],
    }
def get_process_list(process):
    """Analyse the process description and return the Actinia process
    chain and the name of the processing result

    :param process: The process description arguments
    :return: (output_names, actinia_process_list)
    """
    output_names = []

    # First analyse the data entries
    if "red" not in process:
        raise Exception("Process %s requires parameter <red>" % PROCESS_NAME)
    if "nir" not in process:
        raise Exception("Process %s requires parameter <nir>" % PROCESS_NAME)

    red_strds = None
    nir_strds = None

    input_names, process_list = analyse_process_graph(process)

    # Find the red and nir datasets in the input
    for candidate in input_names:
        if process["red"] in candidate:
            red_strds = candidate
        elif process["nir"] in candidate:
            nir_strds = candidate
        else:
            # Pipe other inputs to the output
            output_names.append(candidate)

    if not red_strds:
        raise Exception("Process %s requires an input strds for band <red>"
                        % PROCESS_NAME)
    if not nir_strds:
        raise Exception("Process %s requires an input strds for band <nir>"
                        % PROCESS_NAME)

    location, mapset, datatype, layer_name = \
        ActiniaInterface.layer_def_to_components(red_strds)
    output_name = "%s_%s" % (layer_name, PROCESS_NAME)
    output_names.append(output_name)

    process_list.extend(
        create_process_chain_entry(nir_strds, red_strds, output_name))

    return output_names, process_list
def get_process_list(process):
    """Analyse the process description and return the Actinia process
    chain and the name of the processing result strds that was filtered
    by start and end date

    :param process: The process description
    :return: (output_names, actinia_process_list)
    """
    # Get the input description and the process chain to attach this process
    input_names, process_list = analyse_process_graph(process)
    output_names = []

    # The time bounds do not depend on the input — read them once
    # before the loop (None when absent, as before)
    start_time = process.get("from")
    end_time = process.get("to")

    for input_name in input_names:
        location, mapset, datatype, layer_name = \
            ActiniaInterface.layer_def_to_components(input_name)

        # Skip if the datatype is not a strds and put the input into the
        # output
        if datatype and datatype != "strds":
            output_names.append(input_name)
            continue

        output_name = "%s_%s" % (layer_name, PROCESS_NAME)
        output_names.append(output_name)

        pc = create__process_chain_entry(input_name=input_name,
                                         start_time=start_time,
                                         end_time=end_time,
                                         output_name=output_name)
        process_list.append(pc)

    return output_names, process_list
def create_process_chain_entry(input_name):
    """Create a Actinia process description that queries the metadata of
    a raster, vector or strds layer with the matching info module.

    :param input_name: The input layer definition
    :return: A Actinia process chain description
    :raises Exception: If the datatype is not raster, vector or strds
    """
    location, mapset, datatype, layer_name = \
        ActiniaInterface.layer_def_to_components(input_name)
    map_name = layer_name if mapset is None else layer_name + "@" + mapset

    rn = randint(0, 1000000)

    # Map each supported datatype onto its GRASS info module and the
    # name of the module's input parameter
    tools = {"raster": ("r.info", "map"),
             "vector": ("v.info", "map"),
             "strds": ("t.info", "input")}
    if datatype not in tools:
        raise Exception("Unsupported datatype")

    module, param = tools[datatype]
    return {"id": "%s_%i" % (module.replace(".", "_"), rn),
            "module": module,
            "inputs": [{"param": param, "value": map_name}],
            "flags": "g"}
def create_process_chain_entry(data_object: DataObject,
                               output_object: DataObject):
    """Create a Actinia process description that uses t.rast.series and
    r.mapcalc to create a multilayer mask.

    :param data_object: The input time series object
    :param output_object: The name of the output raster map
    :return: A Actinia process chain description, or an error response
        when the layer information cannot be fetched
    """
    output_temp_object = DataObject(
        name=f"{output_object.name}_temp",
        datatype=GrassDataType.RASTER)

    # get number of maps in input_time_series
    iface = ActiniaInterface()
    # this is not working because the input object might not yet exist
    status_code, layer_data = iface.layer_info(
        layer_name=data_object.grass_name())
    if status_code != 200:
        # Fix: the status code belongs to make_response, not jsonify —
        # it was previously passed as a second argument to jsonify so the
        # error response went out with HTTP status 200
        return make_response(
            jsonify({
                "description": "An internal error occurred "
                               "while catching GRASS GIS layer information "
                               "for layer <%s>!\n Error: %s"
                               "" % (data_object, str(layer_data))
            }), 400)
    nmaps = layer_data['number_of_maps']

    rn = randint(0, 1000000)

    pc = [{
        "id": "t_rast_series_%i" % rn,
        "module": "t.rast.series",
        "inputs": [{
            "param": "input",
            "value": data_object.grass_name()
        }, {
            "param": "method",
            "value": "count"
        }, {
            "param": "output",
            "value": output_temp_object.grass_name()
        }],
        "flags": "t"
    }, {
        "id": "r_mapcalc_%i" % rn,
        "module": "r.mapcalc",
        "inputs": [{
            "param": "expression",
            "value": "%(result)s = int(if(%(raw)s < %(nmaps)s, 1, 0))" % {
                "result": output_object.grass_name(),
                "raw": output_temp_object.grass_name(),
                "nmaps": str(nmaps)
            }
        }],
    }]
    # g.remove raster name=output_name_tmp -f ?
    return pc
def __init__(self):
    """Create the Actinia connection used by this resource."""
    self.iface = ActiniaInterface()
class CollectionInformationResource(Resource):
    # Endpoint resource describing a single collection: either a GRASS GIS
    # raster/vector/strds layer or a collection fetched from a STAC catalog.

    def __init__(self):
        # Interface to the Actinia REST service
        self.iface = ActiniaInterface()

    def get(self, name):
        """Return the collection information for the given layer name.

        :param name: Layer definition string, split below into location,
            mapset, datatype and layer components
        :return: A Flask response: collection metadata (200), a
            CollectionNotFound error (404) or an internal error (500)
        """
        # List strds maps from the GRASS location
        location, mapset, datatype, layer = self.iface.layer_def_to_components(
            name)
        if location == "stac":
            # STAC-backed collections are fetched from the remote catalog
            status_code, collection = self.iface.get_stac_collection(name=name)
            if status_code != 200:
                return make_response(
                    jsonify(
                        {
                            "id": "12345678",
                            "code": "Internal",
                            "message": "Server error: %s" % (name),
                            "links": {}}), 500)
            # Not using CollectionInformation model here for now
            # as valid STAC collections comply.
            # Using it here might omit some properties
            # which are not modelled in this backend (e.g. assets)
            return make_response(collection, 200)

        status_code, layer_data = self.iface.layer_info(layer_name=name)
        if status_code != 200:
            return make_response(
                jsonify(
                    {
                        "id": "12345678",
                        "code": "CollectionNotFound",
                        "message": "Collection '%s' does not exist." % (name),
                        "links": {}}), 404)

        # Get the projection from the GRASS mapset
        status_code, mapset_info = self.iface.mapset_info(
            location=location, mapset=mapset)
        if status_code != 200:
            return make_response(
                jsonify(
                    {
                        "id": "12345678",
                        "code": "Internal",
                        "message": "Server error: %s" % (mapset_info),
                        "links": {}}), 500)

        # Default extent: the layer bbox with a wide-open temporal interval;
        # overwritten below for strds layers that carry real timestamps
        extent = CollectionExtent(
            spatial=(
                float(
                    layer_data["west"]),
                float(
                    layer_data["south"]),
                float(
                    layer_data["east"]),
                float(
                    layer_data["north"])),
            temporal=(
                "1900-01-01T00:00:00",
                "2100-01-01T00:00:00"))
        title = "Raster dataset"
        bands = []
        # Spatial dimensions in the mapset's native reference system
        dimensions = {"x": {
            "type": "spatial",
            "axis": "x",
            "extent": [layer_data["west"], layer_data["east"]],
            "reference_system": mapset_info["projection"]
        },
            "y": {
                "type": "spatial",
                "axis": "y",
                "extent": [layer_data["south"], layer_data["north"]],
                "reference_system": mapset_info["projection"]
        },
        }
        platform = "unknown"
        instrument = "unknown"

        if datatype.lower() == "strds":
            title = "Space time raster dataset"
            start_time = layer_data["start_time"]
            end_time = layer_data["end_time"]
            # Reshape the timestamps: replace the date/time separator space
            # with "T" and strip quote characters
            if start_time:
                start_time = start_time.replace(
                    " ", "T").replace(
                    "'", "").replace(
                    '"', '')
            if end_time:
                end_time = end_time.replace(
                    " ", "T").replace(
                    "'", "").replace(
                    '"', '')
            dimensions['t'] = {"type": "temporal",
                               "extent": [start_time, end_time]
                               }
            # Replace the default extent with the real temporal bounds
            extent = CollectionExtent(
                spatial=(
                    float(
                        layer_data["west"]),
                    float(
                        layer_data["south"]),
                    float(
                        layer_data["east"]),
                    float(
                        layer_data["north"])),
                temporal=(
                    start_time,
                    end_time))
            if "semantic_labels" in layer_data:
                # semantic_labels is a comma separated list of band names
                bandlist = layer_data["semantic_labels"].split(',')
                dimensions['bands'] = {"type": "bands",
                                       "values": bandlist
                                       }
                for bandname in bandlist:
                    # not so nice, better use different name and common_name
                    # waiting for GRASS GIS
                    bands.append(EOBands(name=bandname, common_name=bandname))
                # get platform and sensor
                # see
                # https://github.com/radiantearth/stac-spec/blob/master/item-spec/common-metadata.md#platform
                # https://github.com/radiantearth/stac-spec/blob/master/item-spec/common-metadata.md#instruments
                if "_" in bandlist[0]:
                    # The prefix before the first underscore encodes the
                    # sensor, e.g. "L8_..." or "S2_..."
                    sensor_abbr = bandlist[0].split('_')[0]
                    if sensor_abbr == "L5":
                        platform = "landsat-5"
                        instrument = "tm, mss"
                    elif sensor_abbr == "L7":
                        platform = "landsat-7"
                        instrument = "etm+"
                    elif sensor_abbr == "L8":
                        platform = "landsat-8"
                        instrument = "oli, trs"
                    elif sensor_abbr == "S1":
                        platform = "sentinel-1"
                        instrument = "c-sar"
                    elif sensor_abbr == "S2":
                        platform = "sentinel-2"
                        instrument = "msi"

        if datatype.lower() == "vector":
            title = "Vector dataset"

        description = "GRASS GIS location/mapset path: /%s/%s" % (
            location, mapset)

        crs = mapset_info["projection"]
        # presumably mutates extent in place to EPSG:4326 — the return
        # value is unused; TODO confirm
        coordinate_transform_extent_to_EPSG_4326(crs=crs, extent=extent)

        # GRASS / actinia do not yet report platform and instrument
        properties = (CollectionProperties(eo_platform=platform,
                                           eo_instrument=instrument,
                                           eo_bands=bands))

        ci = CollectionInformation(
            id=name,
            title=title,
            description=description,
            extent=extent,
            properties=properties,
            dimensions=dimensions)

        return ci.as_response(http_status=200)
def __init__(self):
    """Create the Actinia connection and the graph database handle."""
    self.iface = ActiniaInterface()
    self.db = GraphDB()