def encode_eo_metadata(self, coverage, request=None, subset_polygon=None):
    """ Encode the EO metadata of a ``coverage`` as a gmlcov:metadata element.

        A stored "eogml" metadata data item is preferred; otherwise the
        earth observation element is encoded from the coverage model. When a
        ``request`` is given, a wcseo:lineage element referencing the
        originating GetCoverage request is appended. When ``subset_polygon``
        is given, the encoded footprint is intersected with it.
    """
    data_items = list(coverage.data_items.filter(
        semantic="metadata", format="eogml"
    ))

    if len(data_items) >= 1:
        # reuse the stored EO-GML metadata document
        with open(retrieve(data_items[0])) as f:
            earth_observation = etree.parse(f).getroot()
        if subset_polygon:
            try:
                feature = earth_observation.xpath(
                    "om:featureOfInterest", namespaces=nsmap
                )[0]
                # replace the stored footprint with the subsetted one
                feature[0] = self.encode_footprint(
                    coverage.footprint.intersection(subset_polygon),
                    coverage.identifier
                )
            except IndexError:
                pass  # no featureOfInterest
    else:
        earth_observation = self.encode_earth_observation(
            coverage, subset_polygon=subset_polygon
        )

    if not request:
        lineage = None
    elif request.method == "GET":
        lineage = EOWCS("lineage",
            EOWCS("referenceGetCoverage",
                self.encode_reference("Reference",
                    # BUGFIX: `.replace("&", "&")` was a no-op; XML-escape
                    # the ampersands of the KVP request URL instead.
                    request.build_absolute_uri().replace("&", "&amp;"),
                    False
                )
            ),
            GML("timePosition", isoformat(now()))
        )
    elif request.method == "POST":  # TODO: better way to do this
        # BUGFIX: same no-op replace as in the GET branch above.
        href = request.build_absolute_uri().replace("&", "&amp;")
        lineage = EOWCS("lineage",
            EOWCS("referenceGetCoverage",
                OWS("ServiceReference",
                    OWS("RequestMessage",
                        etree.parse(request).getroot()
                    ), **{ns_xlink("href"): href}
                )
            ),
            GML("timePosition", isoformat(now()))
        )

    return GMLCOV("metadata",
        GMLCOV("Extension",
            EOWCS("EOMetadata",
                earth_observation,
                *[lineage] if lineage is not None else []
            )
        )
    )
def test_retrieve_ftp_zip(self):
    """ Retrieve two data items from a ZIP package on an FTP storage and
        check that the cached files vanish when the cache context closes.
    """
    import storages
    import packages

    ftp_storage = create(models.Storage,
        url="ftp://anonymous:@localhost:2121/", storage_type="FTP"
    )
    zip_package = create(models.Package,
        location="package.zip", format="ZIP", storage=ftp_storage
    )
    first_item = create(models.DataItem,
        location="file.txt", package=zip_package, semantic="textfile"
    )
    second_item = create(models.DataItem,
        location="file2.txt", package=zip_package, semantic="textfile"
    )

    with CacheContext() as cache:
        first_path = retrieve(first_item, cache)
        second_path = retrieve(second_item, cache)

        # both files must exist while the cache context is alive ...
        self.assertTrue(os.path.exists(first_path))
        self.assertTrue(os.path.exists(second_path))

        with open(first_path) as f:
            self.assertEqual(f.read(), "test\n")
        with open(second_path) as f:
            self.assertEqual(f.read(), "test 2\n")

    # ... and must be cleaned up after leaving it
    self.assertFalse(os.path.exists(first_path))
    self.assertFalse(os.path.exists(second_path))
def encode_eo_metadata(self, coverage, request=None, subset_polygon=None):
    """ Encode the EO metadata of a ``coverage`` as a gmlcov:metadata
        element, preferring a stored "eogml" metadata item. When ``request``
        is given, a wcseo:lineage element for the originating GetCoverage
        request is appended; ``subset_polygon`` restricts the encoded
        footprint to its intersection with the coverage footprint.
    """
    data_items = list(
        coverage.data_items.filter(semantic="metadata", format="eogml"))

    if len(data_items) >= 1:
        with open(retrieve(data_items[0])) as f:
            earth_observation = etree.parse(f).getroot()
        if subset_polygon:
            try:
                feature = earth_observation.xpath(
                    "om:featureOfInterest", namespaces=nsmap)[0]
                # swap in the subsetted footprint
                feature[0] = self.encode_footprint(
                    coverage.footprint.intersection(subset_polygon),
                    coverage.identifier)
            except IndexError:
                pass  # no featureOfInterest
    else:
        earth_observation = self.encode_earth_observation(
            coverage, subset_polygon=subset_polygon)

    if not request:
        lineage = None
    elif request.method == "GET":
        lineage = EOWCS(
            "lineage",
            EOWCS(
                "referenceGetCoverage",
                self.encode_reference(
                    "Reference",
                    # BUGFIX: `.replace("&", "&")` was a no-op; XML-escape
                    # the ampersands of the KVP request URL instead.
                    request.build_absolute_uri().replace("&", "&amp;"),
                    False)),
            GML("timePosition", isoformat(now())))
    elif request.method == "POST":  # TODO: better way to do this
        # BUGFIX: same no-op replace as in the GET branch above.
        href = request.build_absolute_uri().replace("&", "&amp;")
        lineage = EOWCS(
            "lineage",
            EOWCS(
                "referenceGetCoverage",
                OWS("ServiceReference",
                    OWS("RequestMessage",
                        etree.parse(request).getroot()),
                    **{ns_xlink("href"): href})),
            GML("timePosition", isoformat(now())))

    return GMLCOV(
        "metadata",
        GMLCOV(
            "Extension",
            EOWCS("EOMetadata", earth_observation,
                  *[lineage] if lineage is not None else [])))
def test_retrieve_http(self):
    """ Retrieve a single data item from an HTTP storage; the cached file
        must disappear once the cache context is closed.
    """
    import storages
    import packages

    http_storage = create(models.Storage,
        url="http://eoxserver.org/export/2523/downloads",
        storage_type="HTTP")
    pdf_item = create(models.DataItem,
        location="EOxServer_documentation-0.3.0.pdf",
        storage=http_storage, semantic="pdffile")

    with CacheContext() as cache:
        local_path = retrieve(pdf_item, cache)
        # downloaded file exists while the cache context is alive
        self.assertTrue(os.path.exists(local_path))

    # and is removed when the context is left
    self.assertFalse(os.path.exists(local_path))
def test_retrieve_http(self):
    """ Retrieve a dataset from an HTTP storage; the cached file must be
        gone after the cache context closes.
    """
    import storages
    import packages

    remote_storage = create(models.Storage,
        url="http://eoxserver.org/export/2523/downloads",
        storage_type="HTTP"
    )
    documentation = create(models.Dataset,
        location="EOxServer_documentation-0.3.0.pdf",
        storage=remote_storage
    )

    with CacheContext() as cache:
        downloaded = retrieve(documentation, cache)
        # file is available inside the cache context
        self.assertTrue(os.path.exists(downloaded))

    # and cleaned up afterwards
    self.assertFalse(os.path.exists(downloaded))
def _read_metadata(self, data_item, retrieved_metadata, cache):
    """ Read all available metadata of a ``data_item`` into the
        ``retrieved_metadata`` :class:`dict`.

        Looks up a suitable metadata reader by content; when the reader
        reports a format, it is persisted on the data item. Only keys listed
        in ``self.metadata_keys`` are copied, and values already present in
        ``retrieved_metadata`` are kept (``setdefault``).
    """
    metadata_component = MetadataComponent(env)
    with open(retrieve(data_item, cache)) as f:
        content = f.read()

    reader = metadata_component.get_reader_by_test(content)
    if reader:
        values = reader.read(content)

        # renamed from `format` to avoid shadowing the builtin
        detected_format = values.pop("format", None)
        if detected_format:
            data_item.format = detected_format
            data_item.full_clean()
            data_item.save()

        for key, value in values.items():
            if key in self.metadata_keys:
                # earlier retrieved values take precedence
                retrieved_metadata.setdefault(key, value)
def _read_metadata(self, data_item, retrieved_metadata, cache):
    """ Read all available metadata of a ``data_item`` into the
        ``retrieved_metadata`` :class:`dict`.
    """
    component = MetadataComponent(env)
    with open(retrieve(data_item, cache)) as metadata_file:
        content = metadata_file.read()

    reader = component.get_reader_by_test(content)
    if not reader:
        return

    values = reader.read(content)

    # persist the detected format, when one was reported
    format = values.pop("format", None)
    if format:
        data_item.format = format
        data_item.full_clean()
        data_item.save()

    # copy only whitelisted keys, never overriding earlier values
    for key in values:
        if key in self.metadata_keys:
            retrieved_metadata.setdefault(key, values[key])
def _expand_data_item(data_item, cache=None):
    """ Helper function to expand a source data item to a list of file
        identifiers.
    """
    backends = BackendComponent(env)
    storage = data_item.storage
    package = data_item.package

    if storage:
        # enumerate the files offered by the storage
        storage_component = backends.get_storage_component(
            storage.storage_type
        )
        if not storage_component:
            raise ValueError(
                "No storage component for type '%s' found."
                % storage.storage_type
            )
        return storage_component.list_files(storage.url, data_item.location)

    if package:
        # enumerate the files inside the (locally cached) package
        local_filename = retrieve(package, cache)
        package_component = backends.get_package_component(package.format)
        if not package_component:
            raise ValueError(
                "No package component for type '%s' found." % package.format
            )
        return package_component.list_files(
            local_filename, data_item.location
        )

    # neither storage nor package: a plain local filename
    local_component = backends.get_storage_component("local")
    if not local_component:
        raise ValueError("No active local storage component found.")
    return local_component.list_files("", data_item.location)
def _expand_data_item(data_item, cache=None):
    """ Helper function to expand a source data item to a list of file
        identifiers.
    """
    backend_component = BackendComponent(env)

    if data_item.storage:
        storage = data_item.storage
        component = backend_component.get_storage_component(
            storage.storage_type
        )
        if not component:
            raise ValueError(
                "No storage component for type '%s' found."
                % storage.storage_type
            )
        return component.list_files(storage.url, data_item.location)

    elif data_item.package:
        package = data_item.package
        component = backend_component.get_package_component(package.format)
        if not component:
            raise ValueError(
                "No package component for type '%s' found." % package.format
            )
        # the package must be cached locally before its files can be listed
        return component.list_files(
            retrieve(package, cache), data_item.location
        )

    else:
        # a local filename; expand it via the local storage component
        component = backend_component.get_storage_component("local")
        if not component:
            raise ValueError("No active local storage component found.")
        return component.list_files("", data_item.location)
def _eop2html(coverage):
    """ Generate HTML fragments (table rows) describing the EO metadata of
        a ``coverage``, using the ``_lb`` (label) and ``_kv`` (key/value)
        row helpers.
    """
    yield _lb("Earth Observation:", level=0)

    yield _lb("Phenomenom Time:", level=1)
    yield _kv("start:", "%s" % (isoformat(coverage.begin_time)), 2)
    yield _kv("stop:", "%s" % (isoformat(coverage.end_time)), 2)

    yield _lb("Spatial Metadata:", level=1)
    yield _kv("CRS:", "EPSG:%d" % coverage.srid, 2)

    # BUGFIX: `ext` was referenced without ever being defined (NameError).
    # NOTE(review): assumes the extent is ordered (minx, miny, maxx, maxy)
    # — confirm against the coverage model.
    ext = coverage.extent
    yield _lb("Extent:", level=2)
    yield _kv("north:", "%.3f deg" % ext[3], 3)
    yield _kv("west:", "%.3f deg" % ext[0], 3)
    yield _kv("east:", "%.3f deg" % ext[2], 3)
    yield _kv("south:", "%.3f deg" % ext[1], 3)

    data_items = coverage.data_items
    data_items = data_items.filter(semantic="metadata", format="eogml")
    data_items = list(data_items)
    if len(data_items) < 1:
        return  # no EO-GML metadata available; stop after the basics

    with open(retrieve(data_items[0])) as fid:
        eop = etree.parse(fid)

    # extract metadata
    md = eop_extract(eop)

    if md.get("center"):
        yield _lb("Center:", level=2)
        yield _kv("latitude:", "%.3f dg" % md["center"][0], 3)
        yield _kv("longitude:", "%.3f dg" % md["center"][1], 3)

    def _md(key, label, level=2):
        # render a key/value row only when the metadata key is present
        if md.get(key) is not None:
            return _kv(label, md[key], level)
        return ""

    yield _lb("Platform:", level=1)
    yield _md("platformName", "short name:")
    yield _md("platformSID", "serial identifier:")
    yield _md("platformOrbitType", "orbit type:")

    yield _lb("Instrument:", level=1)
    yield _md("instrumentName", "short name:")
    yield _md("instrumentDescription", "description:")
    yield _md("instrumentType", "type:")

    yield _lb("Sensor:", level=1)
    yield _md("sensorType", "type:")
    yield _md("sensorResolution", "resolution:")
    yield _md("sensorOpMode", "operational mode:")
    yield _md("sensorSwathId", "swath:")

    yield _lb("Acquisition:", level=1)
    yield _md("orbitDirection", "orbit direction:")
    yield _md("orbitDuration", "orbit duration:")
    yield _md("orbitNumber", "orbit number:")
    yield _md("lastOrbitNumber", "last orbit number:")
    yield _md("ascNodeDate", "asc.node date:")
    yield _md("ascNodeLongitude", "asc.node longitude:")
    yield _md("startTimeFromAscNode", "start time from asc.node:")
    # BUGFIX: this label was garbled by a hard line break in the source;
    # reassembled without the stray newline.
    yield _md("complTimeFromAscNode", "compl.time from asc.node:")
    yield _md("wrsLatitudeGrid", "WRS latitude:")
    yield _md("wrsLongitudeGrid", "WRS longitude:")
    yield _md("sunAzimut", "sun azimut:")
    yield _md("sunElevation", "sun elevation:")
    yield _md("sunZenit", "sun zenit:")
    yield _md("intrAzimut", "instrument azimut:")
    yield _md("intrElevation", "instrument elevation:")
    yield _md("intrZenit", "instrument zenit:")
    yield _md("incidence", "incidence angle:")
    yield _md("acrossTrackIncidence", "across track inc.:")
    yield _md("alongTrackIncidence", "along track inc.:")
    yield _md("pitch", "pitch")
    yield _md("roll", "roll")
    yield _md("yaw", "yaw")
    yield _md("antennaLookDir", "antenna look dir.:")
    yield _md("dopplerFreq", "doppler frequency")
    yield _md("incidenceVariation", "incidence ang.variation:")
    yield _md("maxIncidence", "max.incidence angle")
    yield _md("minIncidence", "min.incidence angle")
    yield _md("polarChannels", "polarisation channels:")
    yield _md("polarMode", "polarisation mode:")

    if md.get("cloudCovPercent") is not None:
        yield _lb("Cloud Cover:")
        yield _kv("percentage:", md["cloudCovPercent"], 2)
        yield _md("cloudCovAsConfidence", "assessment confidence:")
        yield _md("cloudCovQuotationMode", "quotation mode:")

    if md.get("snowCovPercent") is not None:
        yield _lb("Snow Cover:")
        yield _kv("percentage:", md["snowCovPercent"], 2)
        yield _md("snowCovAsConfidence", "assessment confidence:")
        yield _md("snowCovQuotationMode", "quotation mode:")

    # BUGFIX: len(md.get("resultQuality")) raised TypeError when the key
    # was absent; default to an empty list.
    if len(md.get("resultQuality", [])) > 0:
        yield _lb("Result Quality:")
        for item in md.get("resultQuality", []):
            text = html_escape(item)
            yield (
                '<tr><td colspan="2"><pre style="width:25em;'
                'overflow-x:scroll;">%s</pre></td></tr>' % text
            )