def generate_distance(xml_filename, no_data_value):
    """Generate the distance-to-cloud product for a scene.

    Args:
        xml_filename <str>: Filename for the ESPA Metadata XML
        no_data_value <float>: No data (fill) value to use
    """
    logger = logging.getLogger(__name__)

    # Load and parse the ESPA XML metadata
    espa_metadata = Metadata(xml_filename)
    espa_metadata.parse()

    src_info = retrieve_metadata_information(espa_metadata)

    # Capture output projection, geo-transform, and dimensions from the
    # QA band so the product matches it exactly
    sensor_code = get_satellite_sensor_code(xml_filename)
    qa_dataset = gdal.Open(src_info.qa_filename)
    output_srs = osr.SpatialReference()
    output_srs.ImportFromWkt(qa_dataset.GetProjection())
    output_transform = qa_dataset.GetGeoTransform()
    samps = qa_dataset.RasterXSize
    lines = qa_dataset.RasterYSize
    del qa_dataset

    # Compute the distance-to-cloud band in memory
    distance_to_cloud = calculate_distance(src_info, no_data_value)

    # Derive the output image filename from the XML filename
    base_name = xml_filename.split('.xml')[0]
    distance_img_filename = '{0}_lst_cloud_distance.img'.format(base_name)

    # Persist the raster, then register the new band in the XML metadata
    write_distance_to_cloud_product(samps=samps,
                                    lines=lines,
                                    transform=output_transform,
                                    wkt=output_srs.ExportToWkt(),
                                    no_data_value=no_data_value,
                                    filename=distance_img_filename,
                                    file_data=distance_to_cloud)

    add_cloud_distance_band_to_xml(espa_metadata=espa_metadata,
                                   filename=distance_img_filename,
                                   sensor_code=sensor_code,
                                   no_data_value=no_data_value)
def main():
    """Main processing for building the points list
    """
    args = retrieve_command_line_arguments()

    # Verbosity is controlled by the --debug flag
    logging_level = logging.DEBUG if args.debug else logging.INFO

    # Setup the default logger format and level.  Log to STDOUT.
    logging.basicConfig(format=('%(asctime)s.%(msecs)03d %(process)d'
                                ' %(levelname)-8s'
                                ' %(filename)s:%(lineno)d:'
                                '%(funcName)s -- %(message)s'),
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging_level,
                        stream=sys.stdout)
    logger = logging.getLogger(__name__)

    logger.info('*** Begin Determine Grid Points ***')

    # Load the XML metadata
    espa_metadata = Metadata()
    espa_metadata.parse(xml_filename=args.xml_filename)

    # Build the gdal objects for the scene
    gdal_objs = initialize_gdal_objects(espa_metadata=espa_metadata)

    # Determine NARR adjusted data bounds
    data_bounds = determine_adjusted_data_bounds(espa_metadata=espa_metadata,
                                                 gdal_objs=gdal_objs)
    logger.debug(str(data_bounds))

    # Generate the point grid
    generate_point_grid(debug=args.debug,
                        gdal_objs=gdal_objs,
                        data_bounds=data_bounds,
                        data_path=args.data_path)

    logger.info('*** Determine Grid Points - Complete ***')
def main():
    """Main processing for generating the MODTRAN tape5 files

    Parses command-line arguments, configures logging, loads the ESPA
    XML metadata, the grid points, and the standard atmospheric layers,
    then generates the MODTRAN tape5 files.

    (Fixed: the docstring previously said "building the points list",
    copied from the grid-points entry point.)
    """
    # Command Line Arguments
    args = retrieve_command_line_arguments()

    # Check logging level
    logging_level = logging.DEBUG if args.debug else logging.INFO

    # Setup the default logger format and level.  Log to STDOUT.
    logging.basicConfig(format=('%(asctime)s.%(msecs)03d %(process)d'
                                ' %(levelname)-8s'
                                ' %(filename)s:%(lineno)d:'
                                '%(funcName)s -- %(message)s'),
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging_level,
                        stream=sys.stdout)
    logger = logging.getLogger(__name__)

    logger.info('*** Begin MODTRAN Tape5 Generation ***')

    # XML Metadata
    espa_metadata = Metadata()
    espa_metadata.parse(xml_filename=args.xml_filename)

    # Load the grid information (only the points are needed here)
    (grid_points, dummy1, dummy2) = read_grid_points()

    # Load the standard atmospheric layers information
    # (list() replaces the redundant [x for x in ...] comprehension)
    std_atmos = list(load_std_atmosphere(data_path=args.data_path))

    generate_modtran_tape5_files(espa_metadata=espa_metadata,
                                 data_path=args.data_path,
                                 std_atmos=std_atmos,
                                 grid_points=grid_points)

    logger.info('*** MODTRAN Tape5 Generation - Complete ***')
def remove_products_from_xml(self):
    """Remove the specified products from the XML file

    The file is read into memory, processed, and written back out with
    out the specified products.
    """

    # Create and load the metadata object
    espa_metadata = Metadata(xml_filename=self._xml_filename)

    # NOTE(review): products_to_remove and options are free names here —
    # presumably module-level/instance state; verify where they are bound.

    # Search for and remove the items
    for band in espa_metadata.xml_object.bands.band:
        if band.attrib['product'] in products_to_remove:
            # Business logic to always keep the radsat_qa band if bt,
            # or toa, or sr output was chosen.
            # (Fixed: this check previously compared the band name to the
            # empty string, which never matches any band, so radsat_qa was
            # always removed despite the stated business rule.)
            if (band.attrib['name'] == 'radsat_qa' and
                    (options['include_sr'] or
                     options['include_sr_toa'] or
                     options['include_sr_thermal'])):
                continue
            else:
                self.remove_band_from_xml(band)

    # Validate the XML
    espa_metadata.validate()

    # Write it to the XML file
    espa_metadata.write(xml_filename=self._xml_filename)
def main(inputxmlfile, outputxmlfile, hdffile):
    """Rename surface reflectance bands to their HLS names

    Bands that are not renamed (i.e. not part of the HLS product) are
    removed from the metadata before it is written back out.

    Args:
        inputxmlfile <str>: Input ESPA Metadata XML filename
        outputxmlfile <str>: Output ESPA Metadata XML filename
        hdffile <str>: Which band group to process ('one' or 'two')
    """
    mm = Metadata(xml_filename=inputxmlfile)
    mm.parse()

    hls_product = 'hls'

    # Old-name -> new-name mapping for each band group
    band_maps = {
        'one': {'sr_band1': 'band01',
                'sr_band2': 'blue',
                'sr_band3': 'green',
                'sr_band4': 'red',
                'sr_band5': 'band05',
                'sr_band6': 'band06',
                'sr_band7': 'band07',
                'sr_band8': 'band08'},
        'two': {'sr_band8a': 'band8a',
                'sr_band9': 'band09',
                'sr_band10': 'band10',
                'sr_band11': 'band11',
                'sr_band12': 'band12',
                'sr_aerosol': 'CLOUD'},
    }
    rename_map = band_maps.get(hdffile, {})

    # Rename the bands for the requested group and tag them as HLS
    for band in mm.xml_object.bands.iterchildren():
        new_name = rename_map.get(band.get('name'))
        if new_name is not None:
            band.set('name', new_name)
            band.set('product', hls_product)

    # Remove every band that was not renamed.  Iterate over a COPY of the
    # children: removing elements while iterating the live tree skips the
    # sibling following each removal (original bug), leaving stale bands.
    for band in list(mm.xml_object.bands.iterchildren()):
        if band.get('product') != hls_product:
            print(band.get('name'))
            mm.xml_object.bands.remove(band)

    mm.write(xml_filename=outputxmlfile)
def main():
    """Core processing for the application
    """
    args = retrieve_command_line_arguments()

    # Setup the logging level
    log_level = logging.DEBUG if args.debug else logging.INFO

    # Setup the default logger format and level.  Log to STDOUT.
    logging.basicConfig(format=('%(asctime)s.%(msecs)03d %(process)d'
                                ' %(levelname)-8s'
                                ' %(filename)s:%(lineno)d:'
                                '%(funcName)s -- %(message)s'),
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=log_level,
                        stream=sys.stdout)
    logger = logging.getLogger(__name__)

    logger.info('*** Begin Extract Auxiliary NARR Data ***')

    # Load the XML metadata
    espa_metadata = Metadata()
    espa_metadata.parse(xml_filename=args.xml_filename)

    # Extract the data; log any failure with a traceback and re-raise
    try:
        logger.info('Extracting ST AUX data')
        extract_narr_aux_data(espa_metadata, args.aux_path)
    except Exception:
        logger.exception('Failed processing auxiliary NARR data')
        raise

    logger.info('*** Extract Auxiliary NARR Data - Complete ***')
class TestMetadata(unittest.TestCase):
    """Test a few things, and expand on it someday"""

    def setUp(self):
        self.mm = Metadata(xml_filename='unittests/test.xml')
        self.mm.parse()

    def tearDown(self):
        pass

    def test_find_version(self):
        # The unit-test document is written against schema version 2.0
        self.assertEqual(self.mm.xml_object.get('version'), '2.0')

    def test_find_corners(self):
        corners = self.mm.xml_object.global_metadata.corner
        self.assertEqual(corners[0].attrib['location'], 'UL')
        self.assertEqual(corners[1].attrib['location'], 'LR')

    def test_find_band_names(self):
        # Bands are named band1..band6 in order
        bands = self.mm.xml_object.bands.band
        for index in range(6):
            self.assertEqual(bands[index].get('name'),
                             'band{0}'.format(index + 1))

    def test_write_success(self):
        # Also tests for successful validation
        self.mm.write(xml_filename='walnuts_pass.xml')
        self.assertTrue(os.path.exists('walnuts_pass.xml') == 1)
        os.unlink('walnuts_pass.xml')

    def test_validation_fail(self):
        maker = objectify.ElementMaker(annotate=False, namespace=None,
                                       nsmap=None)
        # Inject elements that are not part of the schema
        self.mm.xml_object.animals = maker.root()
        self.mm.xml_object.animals.tiger = maker.frog('white')
        self.mm.xml_object.animals.frog = maker.frog('green')
        with self.assertRaises(XMLError):
            self.mm.validate()
def main(inputxmlfile, outputxmlfile):
    """Rename Landsat bands to their HLS names

    Bands that are not renamed (i.e. not part of the HLS product) are
    removed from the metadata before it is written back out.

    Args:
        inputxmlfile <str>: Input ESPA Metadata XML filename
        outputxmlfile <str>: Output ESPA Metadata XML filename
    """
    mm = Metadata(xml_filename=inputxmlfile)
    mm.parse()

    hls_product = 'hls'

    # Old-name -> new-name mapping for the HLS bands
    rename_map = {
        'sr_band1': 'band01',
        'sr_band2': 'band02-blue',
        'sr_band3': 'band03-green',
        'sr_band4': 'band04-red',
        'sr_band5': 'band05',
        'sr_band6': 'band06',
        'sr_band7': 'band07',
        'radsat_qa': 'bandQA',
        'toa_band9': 'band09',
        'bt_band10': 'band10',
        'bt_band11': 'band11',
        'sr_aerosol': 'CLOUD',
    }

    # Rename the bands and tag them as HLS
    for band in mm.xml_object.bands.iterchildren():
        new_name = rename_map.get(band.get('name'))
        if new_name is not None:
            band.set('name', new_name)
            band.set('product', hls_product)

    # Remove every band that was not renamed.  Iterate over a COPY of the
    # children: removing elements while iterating the live tree skips the
    # sibling following each removal (original bug), leaving stale bands.
    for band in list(mm.xml_object.bands.iterchildren()):
        if band.get('product') != hls_product:
            mm.xml_object.bands.remove(band)

    mm.write(xml_filename=outputxmlfile)
class TestMetadata(unittest.TestCase):
    """Test a few things, and expand on it someday"""

    def setUp(self):
        self.mm = Metadata(xml_filename='unittests/test.xml')
        self.mm.parse()

    def tearDown(self):
        pass

    def test_find_version(self):
        self.assertEqual(self.mm.xml_object.get('version'), '2.0')

    def test_find_corners(self):
        # Corners appear in UL, LR order in the test document
        expected_locations = ('UL', 'LR')
        corners = self.mm.xml_object.global_metadata.corner
        for position, location in enumerate(expected_locations):
            self.assertEqual(corners[position].attrib['location'], location)

    def test_find_band_names(self):
        expected_names = ('band1', 'band2', 'band3', 'band4', 'band5',
                          'band6')
        bands = self.mm.xml_object.bands.band
        for position, name in enumerate(expected_names):
            self.assertEqual(bands[position].get('name'), name)

    def test_write_success(self):
        # Also tests for successful validation
        output_name = 'walnuts_pass.xml'
        self.mm.write(xml_filename=output_name)
        self.assertTrue(os.path.exists(output_name) == 1)
        os.unlink(output_name)

    def test_validation_fail(self):
        factory = objectify.ElementMaker(annotate=False, namespace=None,
                                         nsmap=None)
        # Elements outside the schema must trip validation
        self.mm.xml_object.animals = factory.root()
        self.mm.xml_object.animals.tiger = factory.frog('white')
        self.mm.xml_object.animals.frog = factory.frog('green')
        with self.assertRaises(XMLError):
            self.mm.validate()
def generate_qa(xml_filename, no_data_value):
    """Provides the main processing algorithm for generating the QA product.

    Reads the source band filenames and thermal constants from the ESPA
    XML metadata, computes the ST uncertainty band in memory, writes it
    to an ENVI .img file, and registers the new band in the XML.

    Args:
        xml_filename <str>: Filename for the ESPA Metadata XML
        no_data_value <float>: No data (fill) value to use
    """
    logger = logging.getLogger(__name__)

    # XML metadata
    espa_metadata = Metadata(xml_filename)
    espa_metadata.parse()

    # Gather the source filenames for every input to the uncertainty
    # calculation
    radiance_src_info \
        = retrieve_metadata_information(espa_metadata, 'st_thermal_radiance')
    transmission_src_info \
        = retrieve_metadata_information(espa_metadata,
                                        'st_atmospheric_transmittance')
    upwelled_src_info \
        = retrieve_metadata_information(espa_metadata, 'st_upwelled_radiance')
    downwelled_src_info \
        = retrieve_metadata_information(espa_metadata,
                                        'st_downwelled_radiance')
    emis_src_info = retrieve_metadata_information(espa_metadata, 'emis')
    emis_stdev_src_info \
        = retrieve_metadata_information(espa_metadata, 'emis_stdev')

    satellite = espa_metadata.xml_object.global_metadata.satellite

    # K1/K2 thermal conversion constants for this satellite
    thermal_info = retrieve_thermal_constants(espa_metadata, satellite)

    # Determine output information.  Make it like the emissivity band
    sensor_code = get_satellite_sensor_code(xml_filename)
    dataset = gdal.Open(emis_src_info.filename)
    output_srs = osr.SpatialReference()
    output_srs.ImportFromWkt(dataset.GetProjection())
    output_transform = dataset.GetGeoTransform()
    samps = dataset.RasterXSize
    lines = dataset.RasterYSize
    del dataset

    # Build cloud distance filename (produced earlier in the pipeline;
    # not read from the XML here)
    distance_img_filename = ''.join(
        [xml_filename.split('.xml')[0], '_st_cloud_distance', '.img'])

    # Build QA information in memory
    qa = calculate_qa(radiance_src_info.filename,
                      transmission_src_info.filename,
                      upwelled_src_info.filename,
                      downwelled_src_info.filename,
                      emis_src_info.filename,
                      emis_stdev_src_info.filename,
                      distance_img_filename,
                      satellite,
                      float(thermal_info.k1),
                      float(thermal_info.k2),
                      no_data_value)

    # Build QA filename
    qa_img_filename = ''.join(
        [xml_filename.split('.xml')[0], '_st_uncertainty', '.img'])

    # Write QA product
    write_qa_product(samps=samps,
                     lines=lines,
                     transform=output_transform,
                     wkt=output_srs.ExportToWkt(),
                     no_data_value=no_data_value,
                     filename=qa_img_filename,
                     file_data=qa)

    # Register the new band in the XML metadata
    add_qa_band_to_xml(espa_metadata=espa_metadata,
                       filename=qa_img_filename,
                       sensor_code=sensor_code,
                       no_data_value=no_data_value)
def generate_emissivity_data(xml_filename, server_name, server_path,
                             st_data_dir, no_data_value, intermediate):
    """Provides the main processing algorithm for generating the estimated
    Landsat emissivity product.  It produces the final emissivity product.

    Args:
        xml_filename <str>: Filename for the ESPA Metadata XML
        server_name <str>: Name of the ASTER GED server
        server_path <str>: Path on the ASTER GED server
        st_data_dir <str>: Location of the ST data files
        no_data_value <int>: No data (fill) value to use
        intermediate <bool>: Keep any intermediate products generated
    """
    logger = logging.getLogger(__name__)

    # XML metadata
    espa_metadata = Metadata(xml_filename)
    espa_metadata.parse()

    src_info = emis_util.retrieve_metadata_information(espa_metadata)

    # Determine output information from the TOA red band so the product
    # matches its projection, geo-transform, and dimensions
    sensor_code = emis_util.get_satellite_sensor_code(xml_filename)
    dataset = gdal.Open(src_info.toa.red.name)
    output_srs = osr.SpatialReference()
    output_srs.ImportFromWkt(dataset.GetProjection())
    output_transform = dataset.GetGeoTransform()
    samps = dataset.RasterXSize
    lines = dataset.RasterYSize
    del dataset

    # Initialize coefficients.
    # ASTER_GED_WATER is the nominal water emissivity used as the cap /
    # reset value below
    ASTER_GED_WATER = 0.988
    coefficients = sensor_coefficients(espa_metadata.xml_object
                                       .global_metadata.satellite)

    # ====================================================================
    # Build NDVI in memory
    (ls_ndvi_data, ndvi_no_data_locations) = (
        generate_landsat_ndvi(src_info, no_data_value))

    if intermediate:
        logger.info('Writing Landsat NDVI raster')
        util.Geo.generate_raster_file(gdal.GetDriverByName('GTiff'),
                                      'internal_landsat_ndvi.tif',
                                      ls_ndvi_data, samps, lines,
                                      output_transform,
                                      output_srs.ExportToWkt(),
                                      no_data_value, gdal.GDT_Float32)

    # ====================================================================
    # Determine NDSI and Snow locations
    (snow_locations, ndsi_no_data_locations) = (
        snow_and_ndsi_locations(src_info, no_data_value))

    ls_emis_warped_name = 'landsat_emis_warped.tif'
    aster_ndvi_warped_name = 'aster_ndvi_warped.tif'

    # Build the estimated Landsat EMIS data from the ASTER GED data and
    # warp it to the Landsat scenes projection and image extents
    # For convenience the ASTER NDVI is also extracted and warped to the
    # Landsat scenes projection and image extents
    logger.info('Build thermal emissivity band and retrieve ASTER NDVI')
    build_ls_emis_data(server_name=server_name,
                       server_path=server_path,
                       st_data_dir=st_data_dir,
                       src_info=src_info,
                       coefficients=coefficients,
                       ls_emis_warped_name=ls_emis_warped_name,
                       aster_ndvi_warped_name=aster_ndvi_warped_name,
                       no_data_value=no_data_value,
                       intermediate=intermediate)

    (ls_emis_data, ls_emis_gap_locations, ls_emis_no_data_locations,
     aster_ndvi_data, aster_ndvi_gap_locations,
     aster_ndvi_no_data_locations) \
        = (extract_warped_data(ls_emis_warped_name=ls_emis_warped_name,
                               aster_ndvi_warped_name=aster_ndvi_warped_name,
                               no_data_value=no_data_value,
                               intermediate=intermediate))

    # Replace NDVI values greater than 1 with 1
    ls_ndvi_data[ls_ndvi_data > 1.0] = 1
    aster_ndvi_data[aster_ndvi_data > 1.0] = 1

    logger.info('Normalizing Landsat and ASTER NDVI')
    # Normalize Landsat NDVI by max value
    max_ls_ndvi = ls_ndvi_data.max()
    min_ls_ndvi = ls_ndvi_data.min()
    logger.info('Max LS NDVI {0}'.format(max_ls_ndvi))
    ls_ndvi_data = ls_ndvi_data / float(max_ls_ndvi)

    if intermediate:
        logger.info('Writing Landsat NDVI NORM MAX raster')
        util.Geo.generate_raster_file(gdal.GetDriverByName('GTiff'),
                                      'internal_landsat_ndvi_norm_max.tif',
                                      ls_ndvi_data, samps, lines,
                                      output_transform,
                                      output_srs.ExportToWkt(),
                                      no_data_value, gdal.GDT_Float32)

    # Normalize ASTER NDVI by max value
    max_aster_ndvi = aster_ndvi_data.max()
    logger.info('Max ASTER NDVI {0}'.format(max_aster_ndvi))
    aster_ndvi_data = aster_ndvi_data / float(max_aster_ndvi)

    if intermediate:
        logger.info('Writing Aster NDVI NORM MAX raster')
        util.Geo.generate_raster_file(gdal.GetDriverByName('GTiff'),
                                      'internal_aster_ndvi_norm_max.tif',
                                      aster_ndvi_data, samps, lines,
                                      output_transform,
                                      output_srs.ExportToWkt(),
                                      no_data_value, gdal.GDT_Float32)

    # Soil - From prototype code variable name
    logger.info('Calculating bare soil component')

    # Get pixels with significant bare soil component
    bare_locations = np.where(aster_ndvi_data < 0.5)

    # Only calculate soil component for these pixels
    ls_emis_bare = ((ls_emis_data[bare_locations]
                     - 0.975 * aster_ndvi_data[bare_locations])
                    / (1 - aster_ndvi_data[bare_locations]))

    # Memory cleanup
    del aster_ndvi_data

    # Calculate veg adjustment with Landsat
    logger.info('Calculating EMIS Final')

    # Adjust estimated Landsat EMIS for vegetation and snow, to generate
    # the final Landsat EMIS data
    logger.info('Adjusting estimated EMIS for vegetation')
    ls_emis_final = (coefficients.vegetation_coeff * ls_ndvi_data
                     + ls_emis_data * (1.0 - ls_ndvi_data))

    # Calculate fractional vegetation cover
    fv_L = 1.0 - (max_ls_ndvi - ls_ndvi_data) / (max_ls_ndvi - min_ls_ndvi)

    # Memory cleanup
    del ls_ndvi_data

    # Add soil component pixels
    ls_emis_final[bare_locations] = ls_emis_bare

    # Memory cleanup
    del ls_emis_bare
    del bare_locations

    # Set fill values on granule edge to nan
    fill_locations = np.where(np.isnan(fv_L))
    ls_emis_final[fill_locations] = np.nan

    # Memory cleanup
    del fv_L
    del fill_locations

    # Final check for emissivity values greater than 1.  Reset values
    # greater than 1 to nominal veg/water value (should be very few, if any)
    ls_emis_final[ls_emis_final > 1.0] = ASTER_GED_WATER

    # Medium snow
    logger.info('Adjusting estimated EMIS for snow')
    ls_emis_final[snow_locations] = coefficients.snow_emissivity

    # Memory cleanup
    del snow_locations

    # Reset water values
    ls_emis_final[np.where(ls_emis_data > ASTER_GED_WATER)] = ASTER_GED_WATER

    # Memory cleanup
    del ls_emis_data

    # Add the fill and scan gaps and ASTER gaps back into the results,
    # since they may have been lost
    logger.info('Adding fill and data gaps back into the estimated'
                ' Landsat emissivity results')
    ls_emis_final[ls_emis_no_data_locations] = no_data_value
    ls_emis_final[ls_emis_gap_locations] = no_data_value
    ls_emis_final[aster_ndvi_no_data_locations] = no_data_value
    ls_emis_final[aster_ndvi_gap_locations] = no_data_value
    ls_emis_final[ndvi_no_data_locations] = no_data_value
    ls_emis_final[ndsi_no_data_locations] = no_data_value

    # Memory cleanup
    del ls_emis_no_data_locations
    del ls_emis_gap_locations
    del aster_ndvi_no_data_locations
    del aster_ndvi_gap_locations
    del ndvi_no_data_locations
    del ndsi_no_data_locations

    # Write emissivity data and metadata
    ls_emis_img_filename = ''.join([xml_filename.split('.xml')[0],
                                    '_emis', '.img'])

    emis_util.write_emissivity_product(samps=samps,
                                       lines=lines,
                                       transform=output_transform,
                                       wkt=output_srs.ExportToWkt(),
                                       no_data_value=no_data_value,
                                       filename=ls_emis_img_filename,
                                       file_data=ls_emis_final)

    emis_util.add_emissivity_band_to_xml(espa_metadata=espa_metadata,
                                         filename=ls_emis_img_filename,
                                         sensor_code=sensor_code,
                                         no_data_value=no_data_value,
                                         band_type='mean')

    # Memory cleanup
    del ls_emis_final
def generate_emissivity_data(xml_filename, server_name, server_path,
                             st_data_dir, no_data_value, intermediate):
    """Provides the main processing algorithm for generating the estimated
    Landsat emissivity product.  It produces the final emissivity product.

    This variant produces the emissivity STANDARD DEVIATION band.

    Args:
        xml_filename <str>: Filename for the ESPA Metadata XML
        server_name <str>: Name of the ASTER GED server
        server_path <str>: Path on the ASTER GED server
        st_data_dir <str>: Location of the ST data files
        no_data_value <int>: No data (fill) value to use
        intermediate <bool>: Keep any intermediate products generated
    """
    logger = logging.getLogger(__name__)

    # XML metadata
    espa_metadata = Metadata(xml_filename)
    espa_metadata.parse()

    src_info = emis_util.retrieve_metadata_information(espa_metadata)

    # Determine output information from the TOA red band so the product
    # matches its projection, geo-transform, and dimensions
    sensor_code = emis_util.get_satellite_sensor_code(xml_filename)
    dataset = gdal.Open(src_info.toa.red.name)
    output_srs = osr.SpatialReference()
    output_srs.ImportFromWkt(dataset.GetProjection())
    output_transform = dataset.GetGeoTransform()
    samps = dataset.RasterXSize
    lines = dataset.RasterYSize
    del dataset

    ls_emis_stdev_warped_name = 'landsat_emis_stdev_warped.tif'

    # Build the estimated Landsat EMIS data from the ASTER GED data and
    # warp it to the Landsat scenes projection and image extents
    # For convenience the ASTER NDVI is also extracted and warped to the
    # Landsat scenes projection and image extents
    logger.info('Build thermal emissivity standard deviation band ')
    build_ls_emis_data(server_name=server_name,
                       server_path=server_path,
                       st_data_dir=st_data_dir,
                       src_info=src_info,
                       ls_emis_stdev_warped_name=ls_emis_stdev_warped_name,
                       no_data_value=no_data_value,
                       intermediate=intermediate)

    (ls_emis_stdev_data, ls_emis_stdev_no_data_locations) = (
        extract_warped_data(
            ls_emis_stdev_warped_name=ls_emis_stdev_warped_name,
            no_data_value=no_data_value,
            intermediate=intermediate))

    # Add the fill back into the results, since the may have been lost
    logger.info('Adding fill back into the estimated Landsat emissivity'
                ' stdev results')
    ls_emis_stdev_data[ls_emis_stdev_no_data_locations] = no_data_value

    # Memory cleanup
    del ls_emis_stdev_no_data_locations

    # Write emissivity standard deviation data and metadata
    ls_emis_stdev_img_filename = ''.join([xml_filename.split('.xml')[0],
                                          '_emis_stdev', '.img'])

    emis_util.write_emissivity_product(samps=samps,
                                       lines=lines,
                                       transform=output_transform,
                                       wkt=output_srs.ExportToWkt(),
                                       no_data_value=no_data_value,
                                       filename=ls_emis_stdev_img_filename,
                                       file_data=ls_emis_stdev_data)

    emis_util.add_emissivity_band_to_xml(espa_metadata=espa_metadata,
                                         filename=ls_emis_stdev_img_filename,
                                         sensor_code=sensor_code,
                                         no_data_value=no_data_value,
                                         band_type='stdev')

    # Memory cleanup
    del ls_emis_stdev_data
def get_metadata(xml_filename):
    """Get various values from the specified metadata file

    Args:
        xml_filename <str>: Filename for the ESPA Metadata XML

    Returns:
        tuple: (scene_center_time, number_lines, number_samples,
                lon1 (west), lon2 (east), lat1 (north), lat2 (south))
               number_lines/number_samples are the raw XML attribute
               values (strings), as before.

    Raises:
        IOError: XML file missing, or sr_band1 not present in it
        ValueError: A bounding coordinate is out of range
    """
    # Verify that the XML file exists
    if not os.path.exists(xml_filename):
        message = 'XML file does not exist or is not accessible: {0}'.format(
            xml_filename)
        raise IOError(message)

    # Extract values using ESPA metadata library
    espa_metadata = Metadata()
    espa_metadata.parse(xml_filename)
    global_metadata = espa_metadata.xml_object.global_metadata

    # Retrieve scene center time
    scene_center_time = str(global_metadata.scene_center_time)

    # Find sr_band1 and extract the number of lines and samples from it.
    # (for/else replaces the original 0/1 flag; the else branch runs only
    # when no band matched.  The dead -9999 initializations were removed:
    # every returned value is assigned before use or an exception is
    # raised.)
    for band in espa_metadata.xml_object.bands.band:
        if band.attrib['name'] == 'sr_band1':
            number_lines = band.attrib['nlines']
            number_samples = band.attrib['nsamps']
            break
    else:
        message = 'Could not find XML data for surface reflectance band 1.'
        raise IOError(message)

    # Retrieve latitude and longitude values
    lon1 = global_metadata.bounding_coordinates.west
    lon2 = global_metadata.bounding_coordinates.east
    lat1 = global_metadata.bounding_coordinates.north
    lat2 = global_metadata.bounding_coordinates.south

    # Verify latitude and longitude values
    if lat1 < -90.0 or lat1 > 90.0:
        message = 'North latitude {0} should be from -90.0 to 90.0. '.format(
            lat1)
        raise ValueError(message)
    if lat2 < -90.0 or lat2 > 90.0:
        message = 'South latitude {0} should be from -90.0 to 90.0. '.format(
            lat2)
        raise ValueError(message)
    if lon1 < -180.0 or lon1 > 180.0:
        message = 'West longitude {0} should be from -180.0 to 180.0. '.format(
            lon1)
        raise ValueError(message)
    if lon2 < -180.0 or lon2 > 180.0:
        message = 'East longitude {0} should be from -180.0 to 180.0. '.format(
            lon2)
        raise ValueError(message)

    return scene_center_time, number_lines, number_samples, lon1, lon2, lat1, \
        lat2
def convert_bands(xml_filename, no_data_value):
    """Convert multiple intermediate bands

    Args:
        xml_filename <str>: Filename for the ESPA Metadata XML
        no_data_value <float>: No data (fill) value to use
    """
    # XML metadata
    espa_metadata = Metadata(xml_filename)
    espa_metadata.parse()

    # One entry per band, in conversion order:
    # (scale_factor, mult_factor, range_min, range_max,
    #  source_product, band_name)
    band_settings = [
        (EMIS_SCALE_FACTOR, EMIS_MULT_FACTOR,
         EMIS_RANGE_MIN, EMIS_RANGE_MAX,
         EMIS_SOURCE_PRODUCT, EMIS_BAND_NAME),
        (EMIS_STDEV_SCALE_FACTOR, EMIS_STDEV_MULT_FACTOR,
         EMIS_STDEV_RANGE_MIN, EMIS_STDEV_RANGE_MAX,
         EMIS_STDEV_SOURCE_PRODUCT, EMIS_STDEV_BAND_NAME),
        (CLOUD_DISTANCE_SCALE_FACTOR, CLOUD_DISTANCE_MULT_FACTOR,
         CLOUD_DISTANCE_RANGE_MIN, CLOUD_DISTANCE_RANGE_MAX,
         CLOUD_DISTANCE_SOURCE_PRODUCT, CLOUD_DISTANCE_BAND_NAME),
        (THERMAL_RADIANCE_SCALE_FACTOR, THERMAL_RADIANCE_MULT_FACTOR,
         THERMAL_RADIANCE_RANGE_MIN, THERMAL_RADIANCE_RANGE_MAX,
         THERMAL_RADIANCE_SOURCE_PRODUCT, THERMAL_RADIANCE_BAND_NAME),
        (UPWELLED_RADIANCE_SCALE_FACTOR, UPWELLED_RADIANCE_MULT_FACTOR,
         UPWELLED_RADIANCE_RANGE_MIN, UPWELLED_RADIANCE_RANGE_MAX,
         UPWELLED_RADIANCE_SOURCE_PRODUCT, UPWELLED_RADIANCE_BAND_NAME),
        (DOWNWELLED_RADIANCE_SCALE_FACTOR, DOWNWELLED_RADIANCE_MULT_FACTOR,
         DOWNWELLED_RADIANCE_RANGE_MIN, DOWNWELLED_RADIANCE_RANGE_MAX,
         DOWNWELLED_RADIANCE_SOURCE_PRODUCT, DOWNWELLED_RADIANCE_BAND_NAME),
        (ATMOSPHERIC_TRANSMITTANCE_SCALE_FACTOR,
         ATMOSPHERIC_TRANSMITTANCE_MULT_FACTOR,
         ATMOSPHERIC_TRANSMITTANCE_RANGE_MIN,
         ATMOSPHERIC_TRANSMITTANCE_RANGE_MAX,
         ATMOSPHERIC_TRANSMITTANCE_SOURCE_PRODUCT,
         ATMOSPHERIC_TRANSMITTANCE_BAND_NAME),
    ]

    # Convert each band with its own scaling and range parameters
    for (scale, mult, rmin, rmax, source_product, band_name) in band_settings:
        convert_band(espa_metadata=espa_metadata,
                     xml_filename=xml_filename,
                     no_data_value=no_data_value,
                     scale_factor=str(scale),
                     mult_factor=mult,
                     range_min=str(rmin),
                     range_max=str(rmax),
                     source_product=source_product,
                     band_name=band_name)
def buildMetadata(metadata_filename, bit_counts, clip_extents, tile_id,
                  metadata_locs, production_timestamp, tiled_filenames,
                  segment, region, lng_count):
    """Build tile metadata starting from the source L2 metadata files.

    Assembles a new ARD tile metadata XML document by combining:
      * tile-level global metadata, partly sliced out of ``tile_id`` by
        character position and partly copied from the first L2 scene,
      * a tile ``bands`` section derived from the first L2 scene's bands
        (plus a generated lineage band), and
      * one ``scene_metadata`` section per contributing scene, merging the
        L2 scene XML with values read from the Level 1 MTL file.

    The result is pretty-printed to a temporary ``*_ugly.xml`` file,
    blank lines are stripped into ``metadata_filename``, and the final
    file is validated with ``Metadata.validate()``.

    Args:
        metadata_filename <str>: Output filename for the tile metadata XML
        bit_counts: Pixel QA counts, passed to createPixelTypeTuple()
        clip_extents: Tile extents, passed to global_createProjInfo()
        tile_id <str>: ARD tile product ID; satellite code, region,
            h/v grid indices, collection and ARD version are sliced out
            of it by fixed character positions
        metadata_locs <list>: Per-scene dicts with 'L2XML' and 'L1MTL'
            file locations
        production_timestamp <str>: Tile production date string
        tiled_filenames: Tiled band filenames, passed to fixTileBand2()
        segment: not referenced in this function body
        region: Processing region, passed to the bounding/projection
            helpers
        lng_count <int>: Number of contributing scenes to process
    """
    logger.debug('Buildmetadata: Entered')

    # L2 scene global_metadata tags copied verbatim into each scene section.
    SceneTagsToBeKept = ('data_provider', 'satellite', 'instrument',
                         'acquisition_date', 'scene_center_time',
                         'level1_production_date', 'wrs', 'product_id',
                         'lpgs_metadata_file')

    # Parse the L2 XML root and the L1 MTL metadata for each
    # contributing scene (only the first lng_count entries).
    L1Tuple = []
    L2Tuple = []
    for i, metafilenames in enumerate(metadata_locs):
        if i < lng_count:
            l2tree = ET.parse(metafilenames['L2XML'])
            l2tree = l2tree.getroot()
            L2Tuple.append(l2tree)
            L1Tuple.append(landsat.read_metadatas(metafilenames['L1MTL']))

    # read any namespace from the 1st L2 scene
    # NOTE(review): after the loop above, l2tree holds the LAST parsed
    # scene, not the 1st — presumably all scenes share one namespace;
    # confirm.
    namespace = ''
    if ('{' in l2tree.tag):
        endPos = (l2tree.tag).find('}')
        namespace = l2tree.tag[:endPos+1]

    # Start the output xml
    outRoot = ET.Element('ard_metadata')
    outTileMetadata = ET.SubElement(outRoot, 'tile_metadata')
    outTileGlobal = ET.SubElement(outTileMetadata, 'global_metadata')

    # data_provider - use L2 scene
    for child in l2tree.find(namespace + 'global_metadata'):
        if (child.tag == namespace + 'data_provider'):
            outDataProvider = ET.SubElement(outTileGlobal,
                                            'data_provider',
                                            child.attrib)
            outDataProvider.text = child.text

    # satellite - new (keyed on the sensor/satellite code in tile_id[:4])
    gm_satellite = ET.SubElement(outTileGlobal, 'satellite')
    satellite_strs = {
        'LT04': 'LANDSAT_4',
        'LT05': 'LANDSAT_5',
        'LE07': 'LANDSAT_7',
        'LC08': 'LANDSAT_8',
    }
    gm_satellite.text = satellite_strs[tile_id[:4]]

    # instrument - new
    gm_instrument = ET.SubElement(outTileGlobal, 'instrument')
    instrument_strs = {
        'LT04': 'TM',
        'LT05': 'TM',
        'LE07': 'ETM',
        'LC08': 'OLI/TIRS_Combined',
    }
    gm_instrument.text = instrument_strs[tile_id[:4]]

    # Level 1 Collection - new
    gm_l1coll = ET.SubElement(outTileGlobal, 'level1_collection')
    gm_l1coll.text = tile_id[34:36]

    # ARD Version - new
    gm_ardVersion = ET.SubElement(outTileGlobal, 'ard_version')
    gm_ardVersion.text = tile_id[38:40]

    # Region - new
    gm_region = ET.SubElement(outTileGlobal, 'region')
    gm_region.text = tile_id[5:7]

    # acquisition date - use L2 scene
    for child in l2tree.find(namespace + 'global_metadata'):
        if (child.tag == namespace + 'acquisition_date'):
            outAcqDate = ET.SubElement(outTileGlobal,
                                       'acquisition_date',
                                       child.attrib)
            outAcqDate.text = child.text

    # tile_id - new
    gm_productid = ET.SubElement(outTileGlobal, 'product_id')
    gm_productid.text = tile_id

    # tile_production_date - new
    gm_tilepd = ET.SubElement(outTileGlobal, 'production_date')
    gm_tilepd.text = production_timestamp

    # bounding coordinates - modify L2 scene.  The h/v grid indices are
    # sliced out of tile_id and converted to geographic bounds, which are
    # returned as an XML string, parsed, and re-attached to the output.
    logger.debug('Buildmetadata: Ready for bounding_coordinates')
    horiz = tile_id[8:11]
    vertical = tile_id[11:14]
    newBoundingCoordsStr = getGeographicBoundingCoordinates(horiz,
                                                            vertical,
                                                            region)
    tempBoundingElement = ET.fromstring(newBoundingCoordsStr)

    gm_bounding = ET.SubElement(outTileGlobal,
                                tempBoundingElement.tag,
                                tempBoundingElement.attrib)

    for child in tempBoundingElement:
        gm_bounding_child = ET.SubElement(gm_bounding,
                                          child.tag,
                                          child.attrib)
        gm_bounding_child.text = child.text

    # projection information - modify L2 scene (same parse-and-reattach
    # pattern as the bounding coordinates, one extra level deep for the
    # Albers projection parameters)
    logger.debug('Buildmetadata: Ready for projection information')
    newProjInfo = global_createProjInfo(clip_extents, region)
    tempProjElement = ET.fromstring(newProjInfo)

    gm_ProjInfo = ET.SubElement(outTileGlobal,
                                tempProjElement.tag,
                                tempProjElement.attrib)

    for child in tempProjElement:
        gm_proj_child = ET.SubElement(gm_ProjInfo, child.tag, child.attrib)
        gm_proj_child.text = child.text
        if (child.tag == "albers_proj_params"):
            for projChild in child:
                gm_proj_grandchild = ET.SubElement(gm_proj_child,
                                                   projChild.tag,
                                                   projChild.attrib)
                gm_proj_grandchild.text = projChild.text

    # orientation_angle - use L2 scene
    for child in l2tree.find(namespace + 'global_metadata'):
        if (child.tag == namespace + 'orientation_angle'):
            outOrientation = ET.SubElement(outTileGlobal,
                                           'orientation_angle',
                                           child.attrib)
            outOrientation.text = child.text

    # tile_grid - new
    gm_tileid = ET.SubElement(outTileGlobal, 'tile_grid')
    gm_tileid.set('v', tile_id[11:14])
    gm_tileid.set('h', tile_id[8:11])

    # scene_count - new
    gm_sc = ET.SubElement(outTileGlobal, 'scene_count')
    gm_sc.text = str(lng_count)

    # Coverage percentages derived from the QA bit counts.
    qa_percents = createPixelTypeTuple(bit_counts)

    # cloud_cover - new
    gm_cc = ET.SubElement(outTileGlobal, 'cloud_cover')
    gm_cc.text = qa_percents['cloud_cover']

    # cloud_shadow - new
    gm_cs = ET.SubElement(outTileGlobal, 'cloud_shadow')
    gm_cs.text = qa_percents['cloud_shadow']

    # snow_ice - new
    gm_si = ET.SubElement(outTileGlobal, 'snow_ice')
    gm_si.text = qa_percents['snow_ice']

    # fill - new
    gm_fill = ET.SubElement(outTileGlobal, 'fill')
    gm_fill.text = qa_percents['fill']

    #
    # Build all of the bands for the tile
    #
    # This group of tags originate from a Level 2 metadata file.  This
    # section will describe the tile bands - most of the information is
    # already correct, but anything tile related will have to be changed.
    #
    outTileBands = ET.SubElement(outTileMetadata, 'bands')

    # add lineage band
    lineageStr = createLineageSection(tile_id, production_timestamp)
    tempLineageElement = ET.fromstring(lineageStr)

    bands_lineage = ET.SubElement(outTileBands,
                                  tempLineageElement.tag,
                                  tempLineageElement.attrib)

    for child in tempLineageElement:
        bands_lineage_child = ET.SubElement(bands_lineage,
                                            child.tag,
                                            child.attrib)
        bands_lineage_child.text = child.text

    # Loop through all of the bands in the L2 file.  Each band will need
    # to be modified to reflect the characteristics of the tile.
    bandsElement = l2tree.find(namespace + 'bands')

    included_newnames = list()
    for curBand in bandsElement:
        oldBandStr = ET.tostring(curBand)
        # fixTileBand2 rewrites the scene band XML for the tile; it
        # returns (None, None)-style results for bands outside the
        # current XML group, which are skipped.
        newNameOnly, newBandStr = fixTileBand2(tile_id, tiled_filenames,
                                               production_timestamp,
                                               oldBandStr)
        if newBandStr is None:
            logger.debug('Skipping band not in current XML group')
            continue
        # Remember the kept band names; used below to filter the
        # per-scene band sections.
        included_newnames.append(newNameOnly)

        tempBandElement = ET.fromstring(newBandStr)

        bands_band = ET.SubElement(outTileBands,
                                   tempBandElement.tag,
                                   tempBandElement.attrib)

        for child in tempBandElement:
            bands_band_child = ET.SubElement(bands_band,
                                             child.tag,
                                             child.attrib)
            bands_band_child.text = child.text
            # bitmap_description / class_values carry nested value tags.
            if (child.tag in ["bitmap_description", "class_values"]):
                for bandChild in child:
                    bands_band_grandchild = ET.SubElement(bands_band_child,
                                                          bandChild.tag,
                                                          bandChild.attrib)
                    bands_band_grandchild.text = bandChild.text

    logger.debug('Buildmetadata: finished tile bands')

    #
    # "Global" and "bands" have now been created for the new tiles.
    #
    # Next modify the scene metadata for each contributing scene.
    # We'll have to read some values from the Level 1 (MTL.txt) file.
    #
    for i in range(lng_count):
        sceneRoot = (L2Tuple[i])

        # Read some values from the Level 1 (MTL.txt) file.
        request_id = getL1Value(L1Tuple[i], "REQUEST_ID")
        scene_id = getL1Value(L1Tuple[i], "LANDSAT_SCENE_ID")
        elev_src = getL1Value(L1Tuple[i], "ELEVATION_SOURCE")

        # These values only exist for Landsat 4/5/7 products.
        if any(tile_id.startswith(x) for x in ('LT04', 'LT05', 'LE07')):
            sensor_mode = getL1Value(L1Tuple[i], "SENSOR_MODE")
            ephemeris_type = getL1Value(L1Tuple[i], "EPHEMERIS_TYPE")
            cpf_name = getL1Value(L1Tuple[i], "CPF_NAME")

        geometric_rmse_model = getL1Value(L1Tuple[i],
                                          "GEOMETRIC_RMSE_MODEL")
        geometric_rmse_model_x = getL1Value(L1Tuple[i],
                                            "GEOMETRIC_RMSE_MODEL_X")
        geometric_rmse_model_y = getL1Value(L1Tuple[i],
                                            "GEOMETRIC_RMSE_MODEL_Y")

        # opening tags for each scene
        outSceneMetadata = ET.SubElement(outRoot, 'scene_metadata')
        outSceneIndex = ET.SubElement(outSceneMetadata, 'index')
        outSceneIndex.text = str(i+1)
        outSceneGlobal = ET.SubElement(outSceneMetadata, 'global_metadata')

        # Regurgitate the L2 scene information, interspursing some
        # additional L1 info along the way.
        for child in sceneRoot.find(namespace + 'global_metadata'):
            # NOTE(review): the result of this first replace() is
            # immediately overwritten by the next line — it appears to be
            # dead code left from an earlier namespace-stripping approach.
            newTag = (child.tag).replace('ns0:', '')
            newTag = (child.tag).replace(namespace, '')
            if (newTag in SceneTagsToBeKept):
                outGeneric = ET.SubElement(outSceneGlobal,
                                           newTag,
                                           child.attrib)
                outGeneric.text = child.text
            # After 'wrs', insert the L1 request and scene IDs.
            if (newTag == 'wrs'):
                outGeneric = ET.SubElement(outSceneGlobal, 'request_id')
                outGeneric.text = request_id
                outGeneric = ET.SubElement(outSceneGlobal, 'scene_id')
                outGeneric.text = scene_id
            # After 'product_id', insert elevation source and the
            # Landsat 4/5/7-only values.
            if (newTag == 'product_id'):
                outGeneric = ET.SubElement(outSceneGlobal,
                                           'elevation_source')
                outGeneric.text = elev_src
                is_landsat_4_7 = any(tile_id.startswith(x)
                                     for x in ('LT04', 'LT05', 'LE07'))
                if is_landsat_4_7:
                    outGeneric = ET.SubElement(outSceneGlobal,
                                               'sensor_mode')
                    outGeneric.text = sensor_mode
                    outGeneric = ET.SubElement(outSceneGlobal,
                                               'ephemeris_type')
                    outGeneric.text = ephemeris_type
                    outGeneric = ET.SubElement(outSceneGlobal, 'cpf_name')
                    outGeneric.text = cpf_name
            # After 'lpgs_metadata_file', insert the RMSE values when the
            # MTL lookup succeeded (getL1Value reports misses with a
            # "not found" string).
            if (newTag == 'lpgs_metadata_file'):
                if geometric_rmse_model.find("not found") == -1:
                    outGeneric = ET.SubElement(outSceneGlobal,
                                               'geometric_rmse_model')
                    outGeneric.text = geometric_rmse_model
                if geometric_rmse_model_x.find("not found") == -1:
                    outGeneric = ET.SubElement(outSceneGlobal,
                                               'geometric_rmse_model_x')
                    outGeneric.text = geometric_rmse_model_x
                if geometric_rmse_model_y.find("not found") == -1:
                    outGeneric = ET.SubElement(outSceneGlobal,
                                               'geometric_rmse_model_y')
                    outGeneric.text = geometric_rmse_model_y

        outSceneBands = ET.SubElement(outSceneMetadata, 'bands')

        # The scene bands - copy from the L2 scene, stripping the XML
        # namespace and keeping only the bands included in the tile.
        for bandTag in sceneRoot.find(namespace + 'bands'):
            if bandTag.attrib.get('name') not in included_newnames:
                logger.debug('Skipping band not in current XML group')
                continue
            newTag = (bandTag.tag).replace(namespace, '')
            bandElement = ET.SubElement(outSceneBands,
                                        newTag,
                                        bandTag.attrib)
            bandElement.text = bandTag.text
            for child in bandTag:
                newTag2 = (child.tag).replace(namespace, '')
                childElement = ET.SubElement(bandElement,
                                             newTag2,
                                             child.attrib)
                childElement.text = child.text
                if (newTag2 in ["bitmap_description", "class_values"]):
                    for bitmapChild in child:
                        bitmapTag = (bitmapChild.tag).replace(namespace, '')
                        bands_band_bitmap = ET.SubElement(
                            childElement, bitmapTag, bitmapChild.attrib)
                        bands_band_bitmap.text = bitmapChild.text

    logger.debug('Buildmetadata: Ready to write')

    # Attach the document namespaces and schema location by hand.
    namespace1Prefix = "xmlns"
    namespace2Prefix = "xmlns:xsi"
    namespace3Prefix = "xsi:schemaLocation"

    # TODO: these should come from the XSD
    namespace1URI = "https://landsat.usgs.gov/ard/v1"
    namespace2URI = "http://www.w3.org/2001/XMLSchema-instance"
    namespace3URI = ("https://landsat.usgs.gov/ard/v1 "
                     "https://landsat.usgs.gov/ard/ard_metadata_v1_1.xsd")

    outRoot.attrib[namespace3Prefix] = namespace3URI
    outRoot.attrib[namespace2Prefix] = namespace2URI
    outRoot.attrib[namespace1Prefix] = namespace1URI
    outRoot.attrib["version"] = "1.1"

    # Add string indentation - Unfortunately, this function produces
    # extra carriage returns after some elements...
    prettyString = (
        minidom.parseString(ET.tostring(outRoot)
                            ).toprettyxml(encoding="utf-8", indent=" ")
    )

    # Write to temp file
    # NOTE(review): toprettyxml(encoding=...) followed by .encode('utf-8')
    # is Python 2 behavior (bytes in, implicit ascii decode); this would
    # fail on non-ASCII content and under Python 3 — confirm runtime.
    uglyFullName = metadata_filename.replace(".xml", "_ugly.xml")
    with open(uglyFullName, "w") as f:
        f.write(prettyString.encode('utf-8'))

    # Looks like the minidom pretty print added some blank lines followed
    # by CRLF.  The blank lines are of more than one length in our case.
    # Remove any blank lines.
    inMetafile = open(uglyFullName, "r")
    outMetafile = open(metadata_filename, "w")

    for curLine in inMetafile:
        # A line counts as blank if it contains only spaces, CR, and LF.
        allSpaces = True
        for curChar in curLine:
            if ((curChar != '\x20') and
                    (curChar != '\x0D') and
                    (curChar != '\x0A')):
                allSpaces = False
                # NOTE(review): this 'continue' keeps scanning the rest of
                # the line even though the verdict is settled; a 'break'
                # would be equivalent and cheaper.
                continue
        if allSpaces is False:
            outMetafile.write(curLine)
        # else:
        #     print 'Found blank line'

    inMetafile.close()
    outMetafile.close()

    # Validate metafile that was just created
    tile_metadata = Metadata(xml_filename=metadata_filename)
    tile_metadata.validate()
def setUp(self):
    """Load and parse the unit-test metadata fixture before each test."""
    self.mm = Metadata(xml_filename='unittests/test.xml')
    self.mm.parse()