def read_path(self, path):
    """Read product metadata values from a Landsat-8 L1 metadata file.

    Returns a dict with identifier, footprint, begin/end time, cloud
    cover, track/frame (WRS path/row) and processing date.
    """
    metadata = parse_landsat8_l1_metadata_file(path)
    product_md = metadata['PRODUCT_METADATA']
    file_info = metadata['METADATA_FILE_INFO']

    # corner coordinates as (lon, lat) pairs, keyed by corner name
    corners = {
        name: (
            float(product_md['CORNER_%s_LON_PRODUCT' % name]),
            float(product_md['CORNER_%s_LAT_PRODUCT' % name]),
        )
        for name in ('UL', 'UR', 'LL', 'LR')
    }

    # acquisition date and scene center time combined into one timestamp
    acquisition_time = parse_iso8601('%sT%s' % (
        product_md['DATE_ACQUIRED'], product_md['SCENE_CENTER_TIME']
    ))

    values = {
        'identifier': file_info['LANDSAT_SCENE_ID'],
        # closed ring: UL -> UR -> LR -> LL -> UL
        'footprint': Polygon([
            corners['UL'], corners['UR'], corners['LR'], corners['LL'],
            corners['UL'],
        ]),
        'begin_time': acquisition_time,
        'end_time': acquisition_time,
        'cloud_cover': float(metadata['IMAGE_ATTRIBUTES']['CLOUD_COVER']),
        'track': product_md['WRS_PATH'],
        'frame': product_md['WRS_ROW'],
        'processing_date': parse_iso8601(file_info['FILE_DATE']),
    }
    # TODO: maybe convert additional fields from Metadata file
    return values
def make_subsets(self, begin, end=None):
    """Build a temporal Subsets object.

    A single ``begin`` yields a point-in-time Slice; with ``end`` given
    a Trim over the interval is produced instead.
    """
    begin_time = parse_iso8601(begin)
    if end is not None:
        subset = Trim("t", begin_time, parse_iso8601(end))
    else:
        subset = Slice("t", begin_time)
    return Subsets([subset])
def parse_time(string):
    """Parse a TIME parameter into a temporal Slice or Trim subset.

    A single value yields a Slice; two or three slash-separated values
    yield a Trim (a third component, the resolution, is ignored).
    Raises InvalidParameterException for any other shape.
    """
    parts = string.split("/")
    count = len(parts)
    if count == 1:
        return Slice("t", parse_iso8601(parts[0]))
    if count in (2, 3):  # a trailing resolution component is ignored
        return Trim("t", parse_iso8601(parts[0]), parse_iso8601(parts[1]))
    raise InvalidParameterException("Invalid TIME parameter.", "time")
def parse_quoted_temporal(value):
    """ Parses a quoted temporal value.

        Returns ``None`` for the wildcard ``"*"``; otherwise the parsed
        datetime of the double-quoted ISO 8601 string.

        Raises ``ValueError`` when the value is not enclosed in double
        quotes.
    """
    if value == "*":
        return None
    # Bug fix: the original condition joined the two checks with ``and``,
    # so a value with only ONE quote (e.g. ``"2020-01-01``) slipped through
    # and had the wrong characters stripped. Both quotes must be present.
    if not (value.startswith('"') and value.endswith('"')):
        raise ValueError("Temporal value needs to be quoted with double quotes.")
    return parse_iso8601(value[1:-1])
def parse_quoted_temporal(value):
    """ Parses a quoted temporal value.

        Returns ``None`` for the wildcard ``"*"``; otherwise the parsed
        datetime of the double-quoted ISO 8601 string.

        Raises ``ValueError`` when the value is not enclosed in double
        quotes.
    """
    if value == "*":
        return None
    # Bug fix: the original condition joined the two checks with ``and``,
    # accepting half-quoted values and stripping the wrong characters.
    # Both the opening and the closing quote must be present.
    if not (value.startswith('"') and value.endswith('"')):
        raise ValueError(
            "Temporal value needs to be quoted with double quotes.")
    return parse_iso8601(value[1:-1])
def read(self, obj):
    """Read identifier, footprint and acquisition time from Landsat-8 L1
    metadata content."""
    metadata = parse_landsat8_l1_metadata_content(obj)
    product_md = metadata['PRODUCT_METADATA']

    def corner(name):
        # (lon, lat) tuple for the named product corner
        return (
            float(product_md['CORNER_%s_LON_PRODUCT' % name]),
            float(product_md['CORNER_%s_LAT_PRODUCT' % name]),
        )

    upper_left = corner('UL')
    upper_right = corner('UR')
    lower_left = corner('LL')
    lower_right = corner('LR')

    # acquisition date and scene center time combined into one timestamp
    acquired = parse_iso8601('%sT%s' % (
        product_md['DATE_ACQUIRED'], product_md['SCENE_CENTER_TIME']
    ))

    return {
        'identifier': metadata['METADATA_FILE_INFO']['LANDSAT_SCENE_ID'],
        # closed ring: UL -> UR -> LR -> LL -> UL
        'footprint': Polygon(
            [upper_left, upper_right, lower_right, lower_left, upper_left]
        ),
        'begin_time': acquired,
        'end_time': acquired,
    }
def parse_date_or_datetime_11(string):
    """ Parse *string* as an ISO 8601 date or datetime.

        Raises ``ValueError`` if the string cannot be parsed.
    """
    value = parse_iso8601(string)
    if not value:
        # Raise a specific exception type instead of the bare ``Exception``
        # the original used; ValueError is still caught by any broad
        # ``except Exception`` handler, so callers remain compatible.
        raise ValueError(
            "Could not parse date or datetime from '%s'." % string)
    return value
def register_stac_product(location, stac_item, product_type=None, storage=None,
                          replace=False):
    """ Registers a single parsed STAC item as a Product.

        The product type to be used can be specified via the
        ``product_type`` argument, either as a ProductType instance or by
        name; when omitted it is derived from the STAC item's metadata.

        Returns a ``(product, replaced)`` tuple, where ``replaced``
        indicates that a pre-existing product with the same identifier was
        deleted first. Raises ``RegistrationError`` when the product or one
        of its coverages already exists and ``replace`` is not set.
    """
    identifier = stac_item['id']
    replaced = False

    geometry = stac_item['geometry']
    properties = stac_item['properties']
    assets = stac_item['assets']

    # fetch the product type by name, metadata or passed object
    if isinstance(product_type, models.ProductType):
        pass
    # BUG FIX: this was a plain ``if``; a passed ProductType instance fell
    # through to the ``else`` branch and was overwritten by a metadata
    # lookup. ``elif`` keeps the passed instance intact.
    elif isinstance(product_type, str):
        product_type = models.ProductType.objects.get(name=product_type)
    else:
        product_type = models.ProductType.objects.get(
            name=get_product_type_name(stac_item))

    if isinstance(storage, str):
        storage = backends.Storage.objects.get(name=storage)

    footprint = GEOSGeometry(json.dumps(geometry))

    if 'start_datetime' in properties and 'end_datetime' in properties:
        start_time = parse_iso8601(properties['start_datetime'])
        end_time = parse_iso8601(properties['end_datetime'])
    else:
        start_time = end_time = parse_iso8601(properties['datetime'])

    # check if the product already exists. NOTE: the original performed
    # the replace/delete check twice (before and after parsing); doing it
    # once here is sufficient and sets ``replaced`` where the delete
    # actually happens.
    if models.Product.objects.filter(identifier=identifier).exists():
        if replace:
            models.Product.objects.filter(identifier=identifier).delete()
            replaced = True
        else:
            raise RegistrationError('Product %s already exists' % identifier)

    product = models.Product.objects.create(
        identifier=identifier,
        begin_time=start_time,
        end_time=end_time,
        footprint=footprint,
        product_type=product_type,
    )

    metadata = {}

    # STAC properties that translate 1:1 (or 1:n) to metadata fields
    simple_mappings = {
        'eo:cloud_cover': 'cloud_cover',
        'sar:instrument_mode': 'sensor_mode',
        'sat:relative_orbit': 'orbit_number',
        'view:incidence_angle': [
            'minimum_incidence_angle', 'maximum_incidence_angle'
        ],
        'view:sun_azimuth': 'illumination_azimuth_angle',
        'view:sun_elevation': 'illumination_elevation_angle',
    }

    for stac_key, field_name in simple_mappings.items():
        value = properties.get(stac_key)
        if value:
            if isinstance(field_name, str):
                metadata[field_name] = value
            else:
                for name in field_name:
                    metadata[name] = value

    # 'sar:frequency_band'
    # 'sar:center_frequency' # doppler_frequency ?
    # 'sar:product_type'
    #
    # 'sar:resolution_range'
    # 'sar:resolution_azimuth'
    # 'sar:pixel_spacing_range'
    # 'sar:pixel_spacing_azimuth'
    # 'sar:looks_range'
    # 'sar:looks_azimuth'
    # 'sar:looks_equivalent_number'
    # 'view:azimuth'

    # STAC properties needing a value conversion before storing
    complex_mappings = {
        'sar:polarizations': (
            'polarization_channels', lambda v: ', '.join(v)
        ),
        'sar:observation_direction': (
            'antenna_look_direction', lambda v: v.upper()
        ),
        'sat:orbit_state': (
            'orbit_direction', lambda v: v.upper()
        ),
    }

    for stac_key, field_desc in complex_mappings.items():
        raw_value = properties.get(stac_key)
        if raw_value:
            field_name, prep = field_desc
            value = prep(raw_value)
            if isinstance(field_name, str):
                metadata[field_name] = value
            else:
                for name in field_name:
                    metadata[name] = value

    # actually create the metadata object
    create_metadata(product, metadata)

    # register one coverage per asset that declares bands
    for asset_name, asset in assets.items():
        bands = asset.get('eo:bands')
        if not bands:
            continue
        if not isinstance(bands, list):
            bands = [bands]
        band_names = [band['name'] for band in bands]
        coverage_type = models.CoverageType.objects.get(
            Q(allowed_product_types=product_type), *[
                Q(field_types__identifier=band_name)
                for band_name in band_names
            ])
        coverage_id = '%s_%s' % (identifier, asset_name)

        # create the storage item; relative hrefs are resolved against the
        # location of the STAC item itself
        parsed = urlparse(asset['href'])
        if not isabs(parsed.path):
            path = normpath(join(dirname(location), parsed.path))
        else:
            path = parsed.path

        arraydata_item = models.ArrayDataItem(
            location=path,
            storage=storage,
            band_count=len(bands),
        )

        coverage_footprint = footprint
        if 'proj:geometry' in asset:
            coverage_footprint = GEOSGeometry(
                json.dumps(asset['proj:geometry']))

        # get/create Grid from the projection extension fields, preferring
        # asset-level over item-level values
        grid_def = None
        size = None
        origin = None
        shape = asset.get('proj:shape') or properties.get('proj:shape')
        transform = asset.get('proj:transform') or \
            properties.get('proj:transform')
        epsg = asset.get('proj:epsg') or properties.get('proj:epsg')

        if shape:
            size = shape
        if transform:
            # BUG FIX: the original read ``transform[transform[0],
            # transform[3]]``, indexing a list with a tuple (TypeError).
            # The origin is the pair of offset entries of the transform
            # (geotransform order, consistent with the axis_offsets below).
            origin = [transform[0], transform[3]]
        if epsg and transform:
            # NOTE(review): osr.SpatialReference is usually initialized via
            # ImportFromEPSG rather than the constructor — confirm upstream.
            sr = osr.SpatialReference(epsg)
            axis_names = ['x', 'y'] if sr.IsProjected() else ['long', 'lat']
            grid_def = {
                'coordinate_reference_system': epsg,
                'axis_names': axis_names,
                'axis_types': ['spatial', 'spatial'],
                'axis_offsets': [transform[1], transform[5]],
            }

        if not grid_def or not size or not origin:
            # projection metadata incomplete: read the grid from the file
            ds = gdal_open(arraydata_item)
            reader = get_reader_by_test(ds)
            if not reader:
                raise RegistrationError(
                    'Failed to get metadata reader for coverage')
            values = reader.read(ds)
            grid_def = values['grid']
            size = values['size']
            origin = values['origin']

        grid = get_grid(grid_def)

        if models.Coverage.objects.filter(identifier=coverage_id).exists():
            if replace:
                models.Coverage.objects.filter(
                    identifier=coverage_id).delete()
            else:
                raise RegistrationError(
                    'Coverage %s already exists' % coverage_id)

        coverage = models.Coverage.objects.create(
            identifier=coverage_id,
            footprint=coverage_footprint,
            begin_time=start_time,
            end_time=end_time,
            grid=grid,
            axis_1_origin=origin[0],
            axis_2_origin=origin[1],
            axis_1_size=size[0],
            axis_2_size=size[1],
            coverage_type=coverage_type,
            parent_product=product,
        )
        arraydata_item.coverage = coverage
        arraydata_item.full_clean()
        arraydata_item.save()

    # TODO: browses if possible
    return (product, replaced)
def get_auth_expires_at(client):
    """Return the expiry time of the client's auth token as a datetime."""
    expires_at = client.auth_ref['expires_at']
    return parse_iso8601(expires_at)
def read_path(self, path):
    """Read product metadata from a SAFE manifest file at *path*.

    Returns a dict with 'begin_time', 'end_time' and a WKT 'footprint'
    built from all frame footprints in the manifest. The remaining
    commented-out keys document metadata fields that are not yet
    extracted.
    """
    values = {}
    root = self.open_manifest(path).getroot()
    # acquisition period: start/stop times of the acquisition
    period_elem = root.xpath(
        'metadataSection/metadataObject[@ID="acquisitionPeriod"]/'
        'metadataWrap/xmlData/safe:acquisitionPeriod',
        namespaces=nsmap)[0]
    values['begin_time'] = parse_iso8601(
        period_elem.findtext('safe:startTime', namespaces=nsmap))
    values['end_time'] = parse_iso8601(
        period_elem.findtext('safe:stopTime', namespaces=nsmap))
    # one footprint polygon per frame, combined into a MultiPolygon;
    # note the footprint is returned as a WKT string, not a geometry
    coordinates_elems = root.xpath(
        'metadataSection/metadataObject[@ID="measurementFrameSet"]/'
        'metadataWrap/xmlData/safe:frameSet/safe:frame/safe:footPrint/'
        'gml:coordinates', namespaces=nsmap)
    values['footprint'] = MultiPolygon([
        self.parse_coordinates(coordinates_elem.text)
        for coordinates_elem in coordinates_elems
    ]).wkt

    # values['identifier'] =
    # values['browses'] = [
    #     (None, tci_path(granule))
    # ]

    # TODO: extended metadata
    # values['parent_identifier']
    # values['production_status']
    # values['acquisition_type']
    # values['orbit_number'] =
    # values['orbit_direction'] =
    # values['track']
    # values['frame']
    # values['swath_identifier'] =
    # values['product_version'] =
    # values['product_quality_status']
    # values['product_quality_degradation_tag']
    # values['processor_name']
    # values['processing_center']
    # values['creation_date']
    # values['modification_date']
    # values['processing_date'] =
    # values['sensor_mode']
    # values['archiving_center'] =
    # values['processing_mode']
    # values['availability_time'] =
    # values['acquisition_station']
    # values['acquisition_sub_type']
    # values['start_time_from_ascending_node']
    # values['completion_time_from_ascending_node']
    # values['illumination_azimuth_angle'] =
    # values['illumination_zenith_angle'] =
    # values['illumination_elevation_angle']
    # values['polarisation_mode']
    # values['polarization_channels']
    # values['antenna_look_direction']
    # values['minimum_incidence_angle']
    # values['maximum_incidence_angle']
    # values['doppler_frequency']
    # values['incidence_angle_variation']
    # values['cloud_cover'] =
    # values['snow_cover']
    # values['lowest_location']
    # values['highest_location']
    return values
def make_subsets(self, begin, end=None):
    """Create a temporal Subsets: a point-in-time Slice when only
    ``begin`` is given, otherwise a Trim over the interval."""
    start = parse_iso8601(begin)
    if end is not None:
        return Subsets([Trim("t", start, parse_iso8601(end))])
    return Subsets([Slice("t", start)])
def from_description(cls, axis_types, origins):
    """Build an instance from per-axis origin strings.

    Temporal axes are parsed as ISO 8601 timestamps; all other axes are
    converted to floats.
    """
    parsed = []
    for axis_type, origin in zip(axis_types, origins):
        if axis_type == GRID_TYPE_TEMPORAL:
            parsed.append(parse_iso8601(origin))
        else:
            parsed.append(float(origin))
    return cls(parsed)