def remove_products(xml_filename, product_list):
        '''
        Description:
            Removes the specified products from the file system, as well as
            from the XML file.
        '''

        if not product_list:
            # We don't error, just nothing to do.
            return

        espa_xml = metadata_api.parse(xml_filename, silence=True)
        bands = espa_xml.get_bands()

        # Gather all the filenames to be removed
        filenames = list()
        for band in bands.band:
            if band.product in product_list:
                # Add the .img file
                filenames.append(band.file_name)
                # Add the .hdr file
                hdr_filename = band.file_name.replace('.img', '.hdr')
                filenames.append(hdr_filename)

        # If we found some then remove them
        if len(filenames) > 0:
            # First remove from disk
            for filename in filenames:
                if os.path.exists(filename):
                    os.unlink(filename)

            # Second remove from metadata XML
            # Remove them from the XML by creating a new list of all the
            # others
            bands.band[:] = [
                band for band in bands.band if band.product not in product_list
            ]

            # Export to the file with validation
            with open(xml_filename, 'w') as xml_fd:
                metadata_api.export(xml_fd, espa_xml)

        del bands
        del espa_xml
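
# Usage sketch (illustrative; the XML filename and product names below are
# hypothetical):
#
#     remove_products('LE70420332014210-SC20140801.xml',
#                     ['toa_refl', 'intermediate_data'])
#
# Every band whose 'product' attribute matches an entry in the list has its
# .img/.hdr pair deleted from disk and its entry dropped from the XML.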
Example #3
def warp_espa_data(parms, scene, xml_filename=None):
    '''
    Description:
      Warp each ESPA science product using the parameters specified in parms
    '''

    logger = EspaLogging.get_logger(settings.PROCESSING_LOGGER)

    # Validate the parameters
    validate_parameters(parms, scene)
    logger.debug(parms)

    # ------------------------------------------------------------------------
    # De-register the DOQ drivers since they may cause a problem with some of
    # our generated imagery, and we only process the ENVI format here in the
    # processing code.
    doq1 = gdal.GetDriverByName('DOQ1')
    doq2 = gdal.GetDriverByName('DOQ2')
    doq1.Deregister()
    doq2.Deregister()
    # ------------------------------------------------------------------------

    # Verify something was provided for the XML filename
    if xml_filename is None or xml_filename == '':
        raise ee.ESPAException(ee.ErrorCodes.warping, "Missing XML Filename")

    # Change to the working directory
    current_directory = os.getcwd()
    os.chdir(parms['work_directory'])

    try:
        xml = metadata_api.parse(xml_filename, silence=True)
        bands = xml.get_bands()
        global_metadata = xml.get_global_metadata()
        satellite = global_metadata.get_satellite()

        # Might need this for the base warp command image extents
        original_proj4 = get_original_projection(bands.band[0].get_file_name())

        # Build the base warp command to use
        base_warp_command = \
            build_base_warp_command(parms, original_proj4=str(original_proj4))

        # Determine the user specified resample method
        user_resample_method = 'near'  # default
        if parms['resample_method'] is not None:
            user_resample_method = parms['resample_method']

        # Process through the bands in the XML file
        for band in bands.band:
            img_filename = band.get_file_name()
            hdr_filename = img_filename.replace('.img', '.hdr')
            logger.info("Processing %s" % img_filename)

            # Reset the resample method to the user specified value
            resample_method = user_resample_method

            # Always use near for qa bands
            category = band.get_category()
            if category == 'qa':
                resample_method = 'near'  # override with 'near'

            # Update the XML metadata object for the resampling method used
            # Later update_espa_xml is used to update the XML file
            if resample_method == 'near':
                band.set_resample_method('nearest neighbor')
            elif resample_method == 'bilinear':
                band.set_resample_method('bilinear')
            elif resample_method == 'cubic':
                band.set_resample_method('cubic convolution')

            # Figure out the pixel size to use
            pixel_size = parms['pixel_size']

            # EXECUTIVE DECISION(Calli) - ESPA Issue 185
            #    - If the band is (Landsat 7 or 8) and Band 8 do not resize
            #      the pixels.
            if ((satellite == 'LANDSAT_7' or satellite == 'LANDSAT_8') and
                    band.get_name() == 'band8'):
                if parms['target_projection'] == 'lonlat':
                    pixel_size = settings.DEG_FOR_15_METERS
                else:
                    pixel_size = float(band.pixel_size.x)

            # Open the image to read the no data value out since the internal
            # ENVI driver for GDAL does not output it, even if it is known
            ds = gdal.Open(img_filename)
            if ds is None:
                raise RuntimeError("GDAL failed to open (%s)" % img_filename)

            ds_band = None
            try:
                ds_band = ds.GetRasterBand(1)
            except Exception as excep:
                raise ee.ESPAException(ee.ErrorCodes.warping,
                                       str(excep)), None, sys.exc_info()[2]

            # Save the no data value since gdalwarp does not write it out when
            # using the ENVI format
            no_data_value = ds_band.GetNoDataValue()
            if no_data_value is not None:
                # TODO - We don't process any floating point data types.  Yet
                # Convert to an integer then string
                no_data_value = str(int(no_data_value))

            # Force a freeing of the memory
            del ds_band
            del ds

            tmp_img_filename = 'tmp-%s' % img_filename
            tmp_hdr_filename = 'tmp-%s' % hdr_filename

            warp_image(img_filename, tmp_img_filename,
                       base_warp_command=base_warp_command,
                       resample_method=resample_method,
                       pixel_size=pixel_size,
                       no_data_value=no_data_value)

            # ----------------------------------------------------------------
            # Update the header and metadata for the re-projected band
            # ----------------------------------------------------------------

            # Update the tmp ENVI header with our own values for some fields
            sb = StringIO()
            with open(tmp_hdr_filename, 'r') as tmp_fd:
                while True:
                    line = tmp_fd.readline()
                    if not line:
                        break
                    if (line.startswith('data ignore value') or
                            line.startswith('description')):
                        pass
                    else:
                        sb.write(line)

                    if line.startswith('description'):
                        # This may be on multiple lines so read lines until
                        # we find the closing brace
                        if not line.strip().endswith('}'):
                            while True:
                                next_line = tmp_fd.readline()
                                if (not next_line or
                                        next_line.strip().endswith('}')):
                                    break
                        sb.write('description = {ESPA-generated file}\n')
                    elif (line.startswith('data type') and
                          (no_data_value is not None)):
                        sb.write('data ignore value = %s\n' % no_data_value)
            # END - with tmp_fd

            # Do the actual replace here
            with open(tmp_hdr_filename, 'w') as tmp_fd:
                tmp_fd.write(sb.getvalue())
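
            # For reference (hypothetical header content): an ENVI header is
            # a set of 'key = value' lines such as
            #
            #     description = {
            #       gdalwarp output}
            #     data type = 2
            #
            # The rewrite above drops the (possibly multi-line) description,
            # substitutes a fixed one, and appends 'data ignore value' after
            # 'data type', since gdalwarp does not write it for ENVI output.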

            # Remove the original files; they are replaced by the renames below
            if os.path.exists(img_filename):
                os.unlink(img_filename)
            if os.path.exists(hdr_filename):
                os.unlink(hdr_filename)

            # Rename the temp files back to the original names
            os.rename(tmp_img_filename, img_filename)
            os.rename(tmp_hdr_filename, hdr_filename)
        # END for each band in the XML file

        # Update the XML to reflect the new warped output
        update_espa_xml(parms, xml, xml_filename)

        del xml

    except Exception as excep:
        raise ee.ESPAException(ee.ErrorCodes.warping,
                               str(excep)), None, sys.exc_info()[2]
    finally:
        # Change back to the previous directory
        os.chdir(current_directory)
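
# Minimal sketch of a warp helper (an assumption; the real warp_image used
# above is defined elsewhere in this module).  It shows how the per-band
# options computed in warp_espa_data could be appended to a pre-built
# gdalwarp command line; the function name and option handling here are
# hypothetical.
import subprocess


def warp_image_sketch(source_file, output_file, base_warp_command=None,
                      resample_method='near', pixel_size=None,
                      no_data_value=None):
    # Start from the pre-built command, e.g. ['gdalwarp', '-of', 'ENVI', ...]
    cmd = list(base_warp_command or ['gdalwarp'])
    cmd.extend(['-r', resample_method])
    if pixel_size is not None:
        # gdalwarp takes the x and y output resolution separately
        cmd.extend(['-tr', str(pixel_size), str(pixel_size)])
    if no_data_value is not None:
        cmd.extend(['-srcnodata', no_data_value,
                    '-dstnodata', no_data_value])
    cmd.extend([source_file, output_file])
    subprocess.check_call(cmd)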
Example #4
    def createXML(self, scene_xml_file=None, output_xml_file=None,
                  start_year=None, end_year=None, fill_value=None,
                  imgfile=None, log_handler=None):
        """Creates an XML file for the products produced by
           runAnnualBurnSummaries.
        Description: routine to create the XML file for the burned area summary
            bands.  The sample scene-based XML file will be used as the basis
            for the projection information for the output XML file.  The image
            size, extents, etc. will need to be updated, as will the band
            information.
        
        History:
          Created on May 12, 2014 by Gail Schmidt, USGS/EROS LSRD Project

        Args:
          scene_xml_file - scene-based XML file to be used as the base XML
              information for the projection metadata.
          output_xml_file - name of the XML file to be written
          start_year - starting year of the scenes to process
          end_year - ending year of the scenes to process
          fill_value - fill or nodata value for this dataset
          imgfile - name of burned area image file with associated ENVI header
              which can be used to obtain the extents and geographic
              information for these products
          log_handler - handler for the logging information
   
        Returns:
            ERROR - error creating the XML file
            SUCCESS - successful creation of the XML file
        """

        # parse the scene-based XML file, just as a basis for the output XML
        # file.  the global attributes will be similar, but the extents and
        # size of the image will be different.  the bands will be based on the
        # bands that are output from this routine.
        xml = metadata_api.parse (scene_xml_file, silence=True)
        meta_bands = xml.get_bands()
        meta_global = xml.get_global_metadata()

        # update the global information
        meta_global.set_data_provider("USGS/EROS")
        meta_global.set_satellite("LANDSAT")
        meta_global.set_instrument("combination")
        del (meta_global.acquisition_date)
        meta_global.set_acquisition_date(None)

        # open the image file to obtain the geospatial and spatial reference
        # information
        ds = gdal.Open (imgfile)
        if ds is None:
            msg = "GDAL failed to open %s" % imgfile
            logIt (msg, log_handler)
            return ERROR

        ds_band = ds.GetRasterBand (1)
        if ds_band is None:
            msg = "GDAL failed to get the first band in %s" % imgfile
            logIt (msg, log_handler)
            return ERROR
        nlines = float(ds_band.YSize)
        nsamps = float(ds_band.XSize)
        nlines_int = ds_band.YSize 
        nsamps_int = ds_band.XSize 
        del (ds_band)

        ds_transform = ds.GetGeoTransform()
        if ds_transform is None:
            msg = "GDAL failed to get the geographic transform information " \
                "from %s" % imgfile
            logIt (msg, log_handler)
            return ERROR

        ds_srs = osr.SpatialReference()
        if ds_srs is None:
            msg = "GDAL failed to get the spatial reference information " \
                "from %s" % imgfile
            logIt (msg, log_handler)
            return ERROR
        ds_srs.ImportFromWkt (ds.GetProjection())
        del (ds)

        # get the UL and LR center of pixel map coordinates
        (map_ul_x, map_ul_y) = convert_imageXY_to_mapXY (0.5, 0.5,
            ds_transform)
        (map_lr_x, map_lr_y) = convert_imageXY_to_mapXY (
            nsamps - 0.5, nlines - 0.5, ds_transform)

        # update the UL and LR projection corners along with the origin of the
        # corners, for the center of the pixel (global projection information)
        for mycorner in meta_global.projection_information.corner_point:
            if mycorner.location == 'UL':
                mycorner.set_x (map_ul_x)
                mycorner.set_y (map_ul_y)
            if mycorner.location == 'LR':
                mycorner.set_x (map_lr_x)
                mycorner.set_y (map_lr_y)
        meta_global.projection_information.set_grid_origin("CENTER")

        # update the UL and LR latitude and longitude coordinates, using the
        # center of the pixel
        srs_lat_lon = ds_srs.CloneGeogCS()
        coord_tf = osr.CoordinateTransformation (ds_srs, srs_lat_lon)
        for mycorner in meta_global.corner:
            if mycorner.location == 'UL':
                (lon, lat, height) = \
                    coord_tf.TransformPoint (map_ul_x, map_ul_y)
                mycorner.set_longitude (lon)
                mycorner.set_latitude (lat)
            if mycorner.location == 'LR':
                (lon, lat, height) = \
                    coord_tf.TransformPoint (map_lr_x, map_lr_y)
                mycorner.set_longitude (lon)
                mycorner.set_latitude (lat)

        # determine the bounding coordinates; initialize using the UL and LR
        # then work around the scene edges
        # UL
        (map_x, map_y) = convert_imageXY_to_mapXY (0.0, 0.0, ds_transform)
        (ul_lon, ul_lat, height) = coord_tf.TransformPoint (map_x, map_y)
        # LR
        (map_x, map_y) = convert_imageXY_to_mapXY (nsamps, nlines, ds_transform)
        (lr_lon, lr_lat, height) = coord_tf.TransformPoint (map_x, map_y)

        # find the min and max values accordingly, for initialization
        west_lon = min (ul_lon, lr_lon)
        east_lon = max (ul_lon, lr_lon)
        north_lat = max (ul_lat, lr_lat)
        south_lat = min (ul_lat, lr_lat)

        # traverse the boundaries of the image to determine the bounding
        # coords; traverse one extra line and sample to get the outer
        # extents of the image vs. just the UL of the outer edge.
        # top and bottom edges
        for samp in range (0, nsamps_int+1):
            # top edge
            (map_x, map_y) = convert_imageXY_to_mapXY (samp, 0.0, ds_transform)
            (top_lon, top_lat, height) = coord_tf.TransformPoint (map_x, map_y)

            # lower edge
            (map_x, map_y) = convert_imageXY_to_mapXY (samp, nlines,
                ds_transform)
            (low_lon, low_lat, height) = coord_tf.TransformPoint (map_x, map_y)

            # update the min and max values
            west_lon = min (top_lon, low_lon, west_lon)
            east_lon = max (top_lon, low_lon, east_lon)
            north_lat = max (top_lat, low_lat, north_lat)
            south_lat = min (top_lat, low_lat, south_lat)

        # left and right edges
        for line in range (0, nlines_int+1):
            # left edge
            (map_x, map_y) = convert_imageXY_to_mapXY (0.0, line, ds_transform)
            (left_lon, left_lat, height) = coord_tf.TransformPoint (map_x,
                map_y)

            # right edge
            (map_x, map_y) = convert_imageXY_to_mapXY (nsamps, line,
                ds_transform)
            (right_lon, right_lat, height) = coord_tf.TransformPoint (map_x,
                map_y)

            # update the min and max values
            west_lon = min (left_lon, right_lon, west_lon)
            east_lon = max (left_lon, right_lon, east_lon)
            north_lat = max (left_lat, right_lat, north_lat)
            south_lat = min (left_lat, right_lat, south_lat)
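
        # Rationale: in a projected coordinate system the scene edges are
        # generally not aligned with meridians or parallels, so the extreme
        # latitudes/longitudes can occur anywhere along an edge rather than
        # only at the corners; hence every edge sample is transformed and
        # compared.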

        # update the XML
        bounding_coords = meta_global.get_bounding_coordinates()
        bounding_coords.set_west (west_lon)
        bounding_coords.set_east (east_lon)
        bounding_coords.set_north (north_lat)
        bounding_coords.set_south (south_lat)

        del (ds_transform)
        del (ds_srs)

        # clear some of the global information that doesn't apply for these
        # products
        del (meta_global.scene_center_time)
        meta_global.set_scene_center_time(None)
        del (meta_global.lpgs_metadata_file)
        meta_global.set_lpgs_metadata_file(None)
        del (meta_global.orientation_angle)
        meta_global.set_orientation_angle(None)
        del (meta_global.level1_production_date)
        meta_global.set_level1_production_date(None)

        # clear the solar angles
        del (meta_global.solar_angles)
        meta_global.set_solar_angles(None)

        # save the first band and then wipe the bands out so that new bands
        # can be added for the burned area bands
        myband_save = meta_bands.band[0]
        del (meta_bands.band)
        meta_bands.band = []

        # create the band information; there are 4 output products per year
        # for the burned area dataset; add enough bands to cover the products
        # and years
        #    1. first date a burned area was observed (burned_area)
        #    2. number of times burn was observed (burn_count)
        #    3. number of good looks (good_looks_count)
        #    4. maximum probability for burned area (max_burn_prob)
        nproducts = 4
        nyears = end_year - start_year + 1
        nbands = nproducts * nyears
        for i in range (0, nbands):
            # add the new band
            myband = metadata_api.band()
            meta_bands.band.append(myband)

        # how many bands are in the new XML file
        num_scene_bands = len(meta_bands.band)
        print "New XML file has %d bands" % num_scene_bands

        # loop through the products and years to create the band metadata
        band_count = 0
        for product in range (1, nproducts+1):
            for year in range (start_year, end_year+1):
                myband = meta_bands.band[band_count]
                myband.set_product("burned_area")
                myband.set_short_name("LNDBA")
                myband.set_data_type("INT16")
                myband.set_pixel_size(myband_save.get_pixel_size())
                myband.set_fill_value(fill_value)
                myband.set_nlines(nlines)
                myband.set_nsamps(nsamps)
                myband.set_app_version(self.burned_area_version)
                production_date = time.strftime("%Y-%m-%dT%H:%M:%S",
                                                time.gmtime())
                myband.set_production_date(
                    datetime_.datetime.strptime(production_date,
                                                '%Y-%m-%dT%H:%M:%S'))

                # clear some of the band-specific fields that don't apply for
                # this product
                del (myband.source)
                myband.set_source(None)
                del (myband.saturate_value)
                myband.set_saturate_value(None)
                del (myband.scale_factor)
                myband.set_scale_factor(None)
                del (myband.add_offset)
                myband.set_add_offset(None)
                del (myband.toa_reflectance)
                myband.set_toa_reflectance(None)
                del (myband.bitmap_description)
                myband.set_bitmap_description(None)
                del (myband.class_values)
                myband.set_class_values(None)
                del (myband.qa_description)
                myband.set_qa_description(None)
                del (myband.calibrated_nt)
                myband.set_calibrated_nt(None)

                # handle the band-specific differences
                valid_range = metadata_api.valid_range()
                if product == 1:
                    name = "burned_area_%d" % year
                    long_name = "first DOY a burn was observed"
                    file_name = "burned_area_%d.img" % year
                    category = "image"
                    data_units = "day of year"
                    valid_range.min = 0
                    valid_range.max = 366
                    qa_description = "0: no burn observed" 

                elif product == 2:
                    name = "burn_count_%d" % year
                    long_name = "number of times a burn was observed"
                    file_name = "burn_count_%d.img" % year
                    category = "image"
                    data_units = "count"
                    valid_range.min = 0
                    valid_range.max = 366
                    qa_description = "0: no burn observed" 

                elif product == 3:
                    name = "good_looks_count_%d" % year
                    long_name = "number of good looks (pixels with good QA)"
                    file_name = "good_looks_count_%d.img" % year
                    category = "qa"
                    data_units = "count"
                    valid_range.min = 0
                    valid_range.max = 366
                    qa_description = "0: no valid pixels (water, cloud, " \
                        "snow, etc.)"

                elif product == 4:
                    name = "max_burn_prob_%d" % year
                    long_name = "maximum probability for burned area"
                    file_name = "max_burn_prob_%d.img" % year
                    category = "image"
                    data_units = "probability"
                    valid_range.min = 0
                    valid_range.max = 100
                    qa_description = "-9998: bad QA (water, cloud, snow, etc.)"

                myband.set_name(name)
                myband.set_long_name(long_name)
                myband.set_file_name(file_name)
                myband.set_category(category)
                myband.set_data_units(data_units)
                myband.set_valid_range(valid_range)
                myband.set_qa_description(qa_description)

                # increment the band counter
                band_count += 1

            # end for year
        # end for nproducts

        # write out the XML file, calling the export with validation
        fd = open (output_xml_file, 'w')
        if fd is None:
            msg = "Unable to open the output XML file (%s) for writing." % \
                output_xml_file
            logIt (msg, log_handler)
            return ERROR

        metadata_api.export (fd, xml)
        fd.flush()
        fd.close()

        return SUCCESS
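
    # Usage sketch (illustrative; filenames, years, and the fill value are
    # hypothetical, and 'self' stands for an instance of the class this
    # method belongs to):
    #
    #     status = self.createXML(scene_xml_file='LT50420332011001.xml',
    #                             output_xml_file='burned_area_summary.xml',
    #                             start_year=2010, end_year=2011,
    #                             fill_value=-9999,
    #                             imgfile='burned_area_2010.img')
    #     if status != SUCCESS:
    #         logIt('XML creation failed', log_handler)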
    def generate_product(self):
        '''
        Description:
            Provides the main processing algorithm for generating the
            estimated Landsat emissivity product.  It produces the final
            emissivity product.
        '''

        self.logger = logging.getLogger(__name__)

        self.logger.info('Start - Estimate Landsat Emissivity')

        try:
            self.retrieve_metadata_information()
        except Exception:
            self.logger.exception('Failed reading input XML metadata file')
            raise

        try:
            self.determine_sensor_specific_coefficients()
        except Exception:
            self.logger.exception('Failed determining sensor coefficients')
            raise

        # Register all the gdal drivers and choose the GeoTiff for our temp
        # output
        gdal.AllRegister()
        geotiff_driver = gdal.GetDriverByName('GTiff')
        envi_driver = gdal.GetDriverByName('ENVI')

        # ====================================================================
        # Build NDVI in memory
        self.logger.info('Building TOA based NDVI band for Landsat data')

        # NIR ----------------------------------------------------------------
        data_set = gdal.Open(self.toa_nir_name)
        x_dim = data_set.RasterXSize  # They are all the same size
        y_dim = data_set.RasterYSize
        ls_nir_data = data_set.GetRasterBand(1).ReadAsArray(0, 0,
                                                            x_dim, y_dim)
        nir_no_data_locations = np.where(ls_nir_data == self.no_data_value)
        ls_nir_data = ls_nir_data * self.toa_nir_scale_factor

        # RED ----------------------------------------------------------------
        data_set = gdal.Open(self.toa_red_name)
        ls_red_data = data_set.GetRasterBand(1).ReadAsArray(0, 0,
                                                            x_dim, y_dim)
        red_no_data_locations = np.where(ls_red_data == self.no_data_value)
        ls_red_data = ls_red_data * self.toa_red_scale_factor

        # NDVI ---------------------------------------------------------------
        ls_ndvi_data = ((ls_nir_data - ls_red_data) /
                        (ls_nir_data + ls_red_data))

        # Cleanup no data locations
        ls_ndvi_data[nir_no_data_locations] = self.no_data_value
        ls_ndvi_data[red_no_data_locations] = self.no_data_value
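
        # Note (caveat): where (nir + red) is zero the division above yields
        # inf/NaN and numpy emits a runtime warning; only the recorded
        # no-data locations are explicitly cleaned up here.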

        if self.keep_intermediate_data:
            geo_transform = data_set.GetGeoTransform()
            ds_srs = osr.SpatialReference()
            ds_srs.ImportFromWkt(data_set.GetProjection())

        # Memory cleanup
        del ls_red_data
        del ls_nir_data
        del nir_no_data_locations
        del red_no_data_locations

        # ====================================================================
        # Build NDSI in memory
        self.logger.info('Building TOA based NDSI band for Landsat data')

        # GREEN --------------------------------------------------------------
        data_set = gdal.Open(self.toa_green_name)
        ls_green_data = data_set.GetRasterBand(1).ReadAsArray(0, 0,
                                                              x_dim, y_dim)
        green_no_data_locations = (
            np.where(ls_green_data == self.no_data_value))
        ls_green_data = ls_green_data * self.toa_green_scale_factor

        # SWIR1 --------------------------------------------------------------
        data_set = gdal.Open(self.toa_swir1_name)
        ls_swir1_data = data_set.GetRasterBand(1).ReadAsArray(0, 0,
                                                              x_dim, y_dim)
        swir1_no_data_locations = (
            np.where(ls_swir1_data == self.no_data_value))
        ls_swir1_data = ls_swir1_data * self.toa_swir1_scale_factor

        # Build the Landsat TOA NDSI data
        self.logger.info('Building TOA based NDSI for Landsat data')
        ls_ndsi_data = ((ls_green_data - ls_swir1_data) /
                        (ls_green_data + ls_swir1_data))

        # Cleanup no data locations
        ls_ndsi_data[green_no_data_locations] = self.no_data_value
        # Cleanup no data locations
        ls_ndsi_data[swir1_no_data_locations] = self.no_data_value

        # Memory cleanup
        del ls_green_data
        del ls_swir1_data
        del green_no_data_locations
        del swir1_no_data_locations

        # Save for the output products
        ds_tmp_srs = osr.SpatialReference()
        ds_tmp_srs.ImportFromWkt(data_set.GetProjection())
        ds_tmp_transform = data_set.GetGeoTransform()

        # Memory cleanup
        del data_set

        # Save the locations for the specified snow pixels
        self.logger.info('Determine snow pixel locations')
        selected_snow_locations = np.where(ls_ndsi_data > 0.4)

        # Save ndvi and ndsi no data locations
        ndvi_no_data_locations = np.where(ls_ndvi_data == self.no_data_value)
        ndsi_no_data_locations = np.where(ls_ndsi_data == self.no_data_value)

        # Memory cleanup
        del ls_ndsi_data

        # Turn all negative values to zero
        # Use a really small threshold so that we don't get negative zero (-0.0)
        ls_ndvi_data[ls_ndvi_data < 0.0000001] = 0

        if self.keep_intermediate_data:
            self.logger.info('Writing Landsat NDVI raster')
            util.Geo.generate_raster_file(geotiff_driver,
                                          'internal_landsat_ndvi.tif',
                                          ls_ndvi_data,
                                          x_dim, y_dim,
                                          geo_transform,
                                          ds_srs.ExportToWkt(),
                                          self.no_data_value,
                                          gdal.GDT_Float32)

        # Build the estimated Landsat EMIS data from the ASTER GED data and
        # warp it to the Landsat scene's projection and image extents
        # For convenience the ASTER NDVI is also extracted and warped to the
        # Landsat scene's projection and image extents
        self.logger.info('Build thermal emissivity band and'
                         ' retrieve ASTER NDVI')
        (ls_emis_warped_name,
         aster_ndvi_warped_name) = self.build_ls_emis_data(geotiff_driver)

        # Load the warped estimated Landsat EMIS into memory
        data_set = gdal.Open(ls_emis_warped_name)
        ls_emis_data = data_set.GetRasterBand(1).ReadAsArray(0, 0,
                                                             x_dim, y_dim)
        ls_emis_gap_locations = np.where(ls_emis_data == 0)
        ls_emis_no_data_locations = (
            np.where(ls_emis_data == self.no_data_value))

        # Load the warped ASTER NDVI into memory
        data_set = gdal.Open(aster_ndvi_warped_name)
        aster_ndvi_data = data_set.GetRasterBand(1).ReadAsArray(0, 0,
                                                                x_dim, y_dim)
        aster_ndvi_gap_locations = np.where(aster_ndvi_data == 0)
        aster_ndvi_no_data_locations = (
            np.where(aster_ndvi_data == self.no_data_value))

        # Turn all negative values to zero
        # Use a really small threshold so that we don't get negative zero (-0.0)
        aster_ndvi_data[aster_ndvi_data < 0.0000001] = 0

        # Memory cleanup
        del data_set

        if not self.keep_intermediate_data:
            # Cleanup the temp files since we have them in memory
            if os.path.exists(ls_emis_warped_name):
                os.unlink(ls_emis_warped_name)
            if os.path.exists(aster_ndvi_warped_name):
                os.unlink(aster_ndvi_warped_name)

        self.logger.info('Normalizing Landsat and ASTER NDVI')
        # Normalize Landsat NDVI by max value
        max_ls_ndvi = ls_ndvi_data.max()
        self.logger.info('Max LS NDVI {0}'.format(max_ls_ndvi))
        ls_ndvi_data = ls_ndvi_data / float(max_ls_ndvi)

        if self.keep_intermediate_data:
            self.logger.info('Writing Landsat NDVI NORM MAX raster')
            util.Geo.generate_raster_file(geotiff_driver,
                                          'internal_landsat_ndvi_norm_max.tif',
                                          ls_ndvi_data,
                                          x_dim, y_dim,
                                          geo_transform,
                                          ds_srs.ExportToWkt(),
                                          self.no_data_value,
                                          gdal.GDT_Float32)

        # Normalize ASTER NDVI by max value
        max_aster_ndvi = aster_ndvi_data.max()
        self.logger.info('Max ASTER NDVI {0}'.format(max_aster_ndvi))
        aster_ndvi_data = aster_ndvi_data / float(max_aster_ndvi)

        if self.keep_intermediate_data:
            self.logger.info('Writing Aster NDVI NORM MAX raster')
            util.Geo.generate_raster_file(geotiff_driver,
                                          'internal_aster_ndvi_norm_max.tif',
                                          aster_ndvi_data,
                                          x_dim, y_dim,
                                          geo_transform,
                                          ds_srs.ExportToWkt(),
                                          self.no_data_value,
                                          gdal.GDT_Float32)


        # Soil emissivity ('Soil' was the variable name in the prototype code)
        ls_emis_final = ((ls_emis_data - 0.975 * aster_ndvi_data) /
                         (1.0 - aster_ndvi_data))

        # Memory cleanup
        del aster_ndvi_data
        del ls_emis_data

        # Adjust estimated Landsat EMIS for vegetation and snow, to generate
        # the final Landsat EMIS data
        self.logger.info('Adjusting estimated EMIS for vegetation')
        ls_emis_final = (self.vegetation_coeff * ls_ndvi_data +
                         ls_emis_final * (1.0 - ls_ndvi_data))

        # Medium snow
        self.logger.info('Adjusting estimated EMIS for snow')
        ls_emis_final[selected_snow_locations] = self.snow_emis_value
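
        # The two adjustments above amount to a linear mixing model:
        #
        #     emis_soil = (emis_aster - 0.975 * ndvi_aster) / (1 - ndvi_aster)
        #     emis_ls   = veg_coeff * ndvi_ls + emis_soil * (1 - ndvi_ls)
        #
        # i.e. the vegetation contribution is un-mixed from the ASTER-derived
        # emissivity, then re-mixed using the Landsat NDVI as the vegetation
        # fraction proxy; pixels flagged as snow by the NDSI threshold are
        # overwritten with a fixed snow emissivity.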

        # Memory cleanup
        del ls_ndvi_data
        del selected_snow_locations

        # Add the fill and scan gaps and ASTER gaps back into the results,
        # since they may have been lost
        self.logger.info('Adding fill and data gaps back into the estimated'
                         ' Landsat emissivity results')
        ls_emis_final[ls_emis_no_data_locations] = self.no_data_value
        ls_emis_final[ls_emis_gap_locations] = self.no_data_value
        ls_emis_final[aster_ndvi_no_data_locations] = self.no_data_value
        ls_emis_final[aster_ndvi_gap_locations] = self.no_data_value
        ls_emis_final[ndvi_no_data_locations] = self.no_data_value
        ls_emis_final[ndsi_no_data_locations] = self.no_data_value

        # Memory cleanup
        del ls_emis_no_data_locations
        del ls_emis_gap_locations
        del aster_ndvi_no_data_locations
        del aster_ndvi_gap_locations

        product_id = self.xml_filename.split('.xml')[0]
        ls_emis_img_filename = ''.join([product_id, '_emis', '.img'])
        ls_emis_hdr_filename = ''.join([product_id, '_emis', '.hdr'])
        ls_emis_aux_filename = ''.join([ls_emis_img_filename, '.aux', '.xml'])

        self.logger.info('Creating {0}'.format(ls_emis_img_filename))
        util.Geo.generate_raster_file(envi_driver, ls_emis_img_filename,
                                      ls_emis_final, x_dim, y_dim,
                                      ds_tmp_transform,
                                      ds_tmp_srs.ExportToWkt(),
                                      self.no_data_value, gdal.GDT_Float32)

        self.logger.info('Updating {0}'.format(ls_emis_hdr_filename))
        util.Geo.update_envi_header(ls_emis_hdr_filename, self.no_data_value)

        # Remove the *.aux.xml file generated by GDAL
        if os.path.exists(ls_emis_aux_filename):
            os.unlink(ls_emis_aux_filename)

        self.logger.info('Adding {0} to {1}'.format(ls_emis_img_filename,
                                                    self.xml_filename))
        # Add the estimated Landsat emissivity to the metadata XML
        espa_xml = metadata_api.parse(self.xml_filename, silence=True)
        bands = espa_xml.get_bands()
        sensor_code = product_id[0:3]
        source_product = 'toa_refl'

        # Find the TOA Band 1 to use for the specific band details
        base_band = None
        for band in bands.band:
            if band.product == source_product and band.name == 'toa_band1':
                base_band = band

        if base_band is None:
            raise Exception('Failed to find the TOA BLUE band'
                            ' in the input data')

        emis_band = metadata_api.band(product='lst_temp',
                                      source=source_product,
                                      name='landsat_emis',
                                      category='image',
                                      data_type='FLOAT32',
                                      nlines=base_band.get_nlines(),
                                      nsamps=base_band.get_nsamps(),
                                      fill_value=str(self.no_data_value))

        emis_band.set_short_name('{0}EMIS'.format(sensor_code))
        emis_band.set_long_name('Landsat emissivity estimated from ASTER GED'
                                ' data')
        emis_band.set_file_name(ls_emis_img_filename)
        emis_band.set_data_units('Emissivity Coefficient')

        pixel_size = metadata_api.pixel_size(base_band.pixel_size.x,
                                             base_band.pixel_size.y,
                                             base_band.pixel_size.units)
        emis_band.set_pixel_size(pixel_size)

        valid_range = metadata_api.valid_range(min=0.0, max=1.0)
        emis_band.set_valid_range(valid_range)

        # Set the date, but first clean the microseconds off of it
        production_date = (
            datetime.datetime.strptime(datetime.datetime.now().
                                       strftime('%Y-%m-%dT%H:%M:%S'),
                                       '%Y-%m-%dT%H:%M:%S'))

        emis_band.set_production_date(production_date)

        emis_band.set_app_version(util.Version.app_version())

        bands.add_band(emis_band)

        # Write the XML metadata file out
        with open(self.xml_filename, 'w') as output_fd:
            metadata_api.export(output_fd, espa_xml)

        # Memory cleanup
        del ls_emis_final

        self.logger.info('Completed - Estimate Landsat Emissivity')
    def retrieve_metadata_information(self):
        '''
        Description:
            Loads and reads required information from the metadata XML file.
        '''

        # Read the XML metadata
        espa_xml = metadata_api.parse(self.xml_filename, silence=True)
        # Grab the global metadata object
        gm = espa_xml.get_global_metadata()
        # Grab the bands metadata object
        bands = espa_xml.get_bands()

        toa_bt_name = ''  # Only one that is local

        # Find the TOA bands to extract information from
        for band in bands.band:
            if band.product == 'toa_refl' and band.name == 'toa_band2':
                self.toa_green_name = band.get_file_name()
                self.toa_green_scale_factor = float(band.scale_factor)

            if band.product == 'toa_refl' and band.name == 'toa_band3':
                self.toa_red_name = band.get_file_name()
                self.toa_red_scale_factor = float(band.scale_factor)

            if band.product == 'toa_refl' and band.name == 'toa_band4':
                self.toa_nir_name = band.get_file_name()
                self.toa_nir_scale_factor = float(band.scale_factor)

            if band.product == 'toa_refl' and band.name == 'toa_band5':
                self.toa_swir1_name = band.get_file_name()
                self.toa_swir1_scale_factor = float(band.scale_factor)

            if band.product == 'toa_bt' and band.category == 'image':
                # Get the output pixel size
                self.ls_info.x_pixel_size = band.pixel_size.x
                self.ls_info.y_pixel_size = band.pixel_size.y

                toa_bt_name = band.get_file_name()

                # Get the output proj4 string
                self.ls_info.dest_proj4 = (
                    util.Geo.get_proj4_projection_string(toa_bt_name))

        # Error if we didn't find the required TOA bands in the data
        if len(self.toa_green_name) <= 0:
            raise Exception('Failed to find the TOA GREEN band'
                            ' in the input data')
        if len(self.toa_red_name) <= 0:
            raise Exception('Failed to find the TOA RED band'
                            ' in the input data')
        if len(self.toa_nir_name) <= 0:
            raise Exception('Failed to find the TOA NIR band'
                            ' in the input data')
        if len(self.toa_swir1_name) <= 0:
            raise Exception('Failed to find the TOA SWIR1 band'
                            ' in the input data')
        if len(toa_bt_name) <= 0:
            raise Exception('Failed to find the TOA BT band'
                            ' in the input data')

        # Determine the bounding geographic coordinates for the ASTER tiles we
        # will need
        self.ls_info.north = math.ceil(gm.bounding_coordinates.north)
        self.ls_info.south = math.floor(gm.bounding_coordinates.south)
        self.ls_info.east = math.ceil(gm.bounding_coordinates.east)
        self.ls_info.west = math.floor(gm.bounding_coordinates.west)

        # Determine the UTM projection corner points
        for cp in gm.projection_information.corner_point:
            if cp.location == 'UL':
                self.ls_info.min_x_extent = cp.x
                self.ls_info.max_y_extent = cp.y
            if cp.location == 'LR':
                self.ls_info.max_x_extent = cp.x
                self.ls_info.min_y_extent = cp.y

        # Adjust the UTM coordinates for image extents because they are
        # center-of-pixel values, and we need to supply the warping with
        # actual edge extents
        self.ls_info.min_x_extent = (self.ls_info.min_x_extent -
                                     self.ls_info.x_pixel_size * 0.5)
        self.ls_info.max_x_extent = (self.ls_info.max_x_extent +
                                     self.ls_info.x_pixel_size * 0.5)
        self.ls_info.min_y_extent = (self.ls_info.min_y_extent -
                                     self.ls_info.y_pixel_size * 0.5)
        self.ls_info.max_y_extent = (self.ls_info.max_y_extent +
                                     self.ls_info.y_pixel_size * 0.5)
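
        # Worked example (illustrative numbers): with a UL corner x of
        # 376785.0 m and a 30 m pixel, the actual left edge becomes
        # 376785.0 - 30.0 * 0.5 = 376770.0 m.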

        # Save for later
        self.satellite = gm.satellite

        del bands
        del gm
        del espa_xml
    def createXML(self,
                  scene_xml_file=None,
                  output_xml_file=None,
                  start_year=None,
                  end_year=None,
                  fill_value=None,
                  imgfile=None,
                  log_handler=None):
        """Creates an XML file for the products produced by
           runAnnualBurnSummaries.
        Description: routine to create the XML file for the burned area summary
            bands.  The sample scene-based XML file will be used as the basis
            for the projection information for the output XML file.  The image
            size, extents, etc. will need to be updated, as will the band
            information.
        
        History:
          Created on May 12, 2014 by Gail Schmidt, USGS/EROS LSRD Project

        Args:
          scene_xml_file - scene-based XML file to be used as the base XML
              information for the projection metadata.
          output_xml_file - name of the XML file to be written
          start_year - starting year of the scenes to process
          end_year - ending year of the scenes to process
          fill_value - fill or nodata value for this dataset
          imgfile - name of burned area image file with associated ENVI header
              which can be used to obtain the extents and geographic
              information for these products
          log_handler - handler for the logging information
   
        Returns:
            ERROR - error creating the XML file
            SUCCESS - successful creation of the XML file
        """

        # parse the scene-based XML file, just as a basis for the output XML
        # file.  the global attributes will be similar, but the extents and
        # size of the image will be different.  the bands will be based on the
        # bands that are output from this routine.
        xml = metadata_api.parse(scene_xml_file, silence=True)
        meta_bands = xml.get_bands()
        meta_global = xml.get_global_metadata()

        # update the global information
        meta_global.set_data_provider("USGS/EROS")
        meta_global.set_satellite("LANDSAT")
        meta_global.set_instrument("combination")
        del (meta_global.acquisition_date)
        meta_global.set_acquisition_date(None)

        # open the image file to obtain the geospatial and spatial reference
        # information
        ds = gdal.Open(imgfile)
        if ds is None:
            msg = "GDAL failed to open %s" % imgfile
            logIt(msg, log_handler)
            return ERROR

        ds_band = ds.GetRasterBand(1)
        if ds_band is None:
            msg = "GDAL failed to get the first band in %s" % imgfile
            logIt(msg, log_handler)
            return ERROR
        nlines = float(ds_band.YSize)
        nsamps = float(ds_band.XSize)
        nlines_int = ds_band.YSize
        nsamps_int = ds_band.XSize
        del (ds_band)

        ds_transform = ds.GetGeoTransform()
        if ds_transform is None:
            msg = "GDAL failed to get the geographic transform information " \
                "from %s" % imgfile
            logIt(msg, log_handler)
            return ERROR

        ds_srs = osr.SpatialReference()
        if ds_srs is None:
            msg = "GDAL failed to get the spatial reference information " \
                "from %s" % imgfile
            logIt(msg, log_handler)
            return ERROR
        ds_srs.ImportFromWkt(ds.GetProjection())
        del (ds)

        # get the UL and LR center of pixel map coordinates
        (map_ul_x, map_ul_y) = convert_imageXY_to_mapXY(0.5, 0.5, ds_transform)
        (map_lr_x,
         map_lr_y) = convert_imageXY_to_mapXY(nsamps - 0.5, nlines - 0.5,
                                              ds_transform)

        # update the UL and LR projection corners along with the origin of the
        # corners, for the center of the pixel (global projection information)
        for mycorner in meta_global.projection_information.corner_point:
            if mycorner.location == 'UL':
                mycorner.set_x(map_ul_x)
                mycorner.set_y(map_ul_y)
            if mycorner.location == 'LR':
                mycorner.set_x(map_lr_x)
                mycorner.set_y(map_lr_y)
        meta_global.projection_information.set_grid_origin("CENTER")

        # update the UL and LR latitude and longitude coordinates, using the
        # center of the pixel
        srs_lat_lon = ds_srs.CloneGeogCS()
        coord_tf = osr.CoordinateTransformation(ds_srs, srs_lat_lon)
        for mycorner in meta_global.corner:
            if mycorner.location == 'UL':
                (lon, lat, height) = \
                    coord_tf.TransformPoint(map_ul_x, map_ul_y)
                mycorner.set_longitude(lon)
                mycorner.set_latitude(lat)
            if mycorner.location == 'LR':
                (lon, lat, height) = \
                    coord_tf.TransformPoint(map_lr_x, map_lr_y)
                mycorner.set_longitude(lon)
                mycorner.set_latitude(lat)

        # determine the bounding coordinates; initialize using the UL and LR
        # then work around the scene edges
        # UL
        (map_x, map_y) = convert_imageXY_to_mapXY(0.0, 0.0, ds_transform)
        (ul_lon, ul_lat, height) = coord_tf.TransformPoint(map_x, map_y)
        # LR
        (map_x, map_y) = convert_imageXY_to_mapXY(nsamps, nlines, ds_transform)
        (lr_lon, lr_lat, height) = coord_tf.TransformPoint(map_x, map_y)

        # find the min and max values accordingly, for initialization
        west_lon = min(ul_lon, lr_lon)
        east_lon = max(ul_lon, lr_lon)
        north_lat = max(ul_lat, lr_lat)
        south_lat = min(ul_lat, lr_lat)

        # traverse the boundaries of the image to determine the bounding
        # coords; traverse one extra line and sample to get the outer
        # extents of the image vs. just the UL of the outer edge.
        # top and bottom edges
        for samp in range(0, nsamps_int + 1):
            # top edge
            (map_x, map_y) = convert_imageXY_to_mapXY(samp, 0.0, ds_transform)
            (top_lon, top_lat, height) = coord_tf.TransformPoint(map_x, map_y)

            # lower edge
            (map_x, map_y) = convert_imageXY_to_mapXY(samp, nlines,
                                                      ds_transform)
            (low_lon, low_lat, height) = coord_tf.TransformPoint(map_x, map_y)

            # update the min and max values
            west_lon = min(top_lon, low_lon, west_lon)
            east_lon = max(top_lon, low_lon, east_lon)
            north_lat = max(top_lat, low_lat, north_lat)
            south_lat = min(top_lat, low_lat, south_lat)

        # left and right edges
        for line in range(0, nlines_int + 1):
            # left edge
            (map_x, map_y) = convert_imageXY_to_mapXY(0.0, line, ds_transform)
            (left_lon, left_lat,
             height) = coord_tf.TransformPoint(map_x, map_y)

            # right edge
            (map_x, map_y) = convert_imageXY_to_mapXY(nsamps, line,
                                                      ds_transform)
            (right_lon, right_lat,
             height) = coord_tf.TransformPoint(map_x, map_y)

            # update the min and max values
            west_lon = min(left_lon, right_lon, west_lon)
            east_lon = max(left_lon, right_lon, east_lon)
            north_lat = max(left_lat, right_lat, north_lat)
            south_lat = min(left_lat, right_lat, south_lat)

        # update the XML
        bounding_coords = meta_global.get_bounding_coordinates()
        bounding_coords.set_west(west_lon)
        bounding_coords.set_east(east_lon)
        bounding_coords.set_north(north_lat)
        bounding_coords.set_south(south_lat)

        del (ds_transform)
        del (ds_srs)

        # clear some of the global information that doesn't apply for these
        # products
        del (meta_global.scene_center_time)
        meta_global.set_scene_center_time(None)
        del (meta_global.lpgs_metadata_file)
        meta_global.set_lpgs_metadata_file(None)
        del (meta_global.orientation_angle)
        meta_global.set_orientation_angle(None)
        del (meta_global.level1_production_date)
        meta_global.set_level1_production_date(None)

        # clear the solar angles
        del (meta_global.solar_angles)
        meta_global.set_solar_angles(None)

        # save the first band and then wipe the bands out so that new bands
        # can be added for the burned area bands
        myband_save = meta_bands.band[0]
        del (meta_bands.band)
        meta_bands.band = []

        # create the band information; there are 4 output products per year
        # for the burned area dataset; add enough bands to cover the products
        # and years
        #    1. first date a burned area was observed (burned_area)
        #    2. number of times burn was observed (burn_count)
        #    3. number of good looks (good_looks_count)
        #    4. maximum probability for burned area (max_burn_prob)
        nproducts = 4
        nyears = end_year - start_year + 1
        nbands = nproducts * nyears
        for i in range(0, nbands):
            # add the new band
            myband = metadata_api.band()
            meta_bands.band.append(myband)

        # how many bands are in the new XML file
        num_scene_bands = len(meta_bands.band)
        print "New XML file has %d bands" % num_scene_bands

        # loop through the products and years to create the band metadata
        band_count = 0
        for product in range(1, nproducts + 1):
            for year in range(start_year, end_year + 1):
                myband = meta_bands.band[band_count]
                myband.set_product("burned_area")
                myband.set_short_name("LNDBA")
                myband.set_data_type("INT16")
                myband.set_pixel_size(myband_save.get_pixel_size())
                myband.set_fill_value(fill_value)
                myband.set_nlines(nlines)
                myband.set_nsamps(nsamps)
                myband.set_app_version(self.burned_area_version)
                production_date = time.strftime("%Y-%m-%dT%H:%M:%S",
                                                time.gmtime())
                myband.set_production_date(
                    datetime_.datetime.strptime(production_date,
                                                '%Y-%m-%dT%H:%M:%S'))

                # clear some of the band-specific fields that don't apply for
                # this product (see metadata_api.py for the full list of
                # band-related values under class band)
                del (myband.source)
                myband.set_source(None)
                del (myband.saturate_value)
                myband.set_saturate_value(None)
                del (myband.scale_factor)
                myband.set_scale_factor(None)
                del (myband.add_offset)
                myband.set_add_offset(None)
                del (myband.radiance)
                myband.set_radiance(None)
                del (myband.reflectance)
                myband.set_reflectance(None)
                del (myband.thermal_const)
                myband.set_thermal_const(None)
                del (myband.bitmap_description)
                myband.set_bitmap_description(None)
                del (myband.class_values)
                myband.set_class_values(None)
                del (myband.qa_description)
                myband.set_qa_description(None)
                del (myband.calibrated_nt)
                myband.set_calibrated_nt(None)

                # handle the band-specific differences
                valid_range = metadata_api.valid_range()
                if product == 1:
                    name = "burned_area_%d" % year
                    long_name = "first DOY a burn was observed"
                    file_name = "burned_area_%d.img" % year
                    category = "image"
                    data_units = "day of year"
                    valid_range.min = 0
                    valid_range.max = 366
                    qa_description = "0: no burn observed"

                elif product == 2:
                    name = "burn_count_%d" % year
                    long_name = "number of times a burn was observed"
                    file_name = "burn_count_%d.img" % year
                    category = "image"
                    data_units = "count"
                    valid_range.min = 0
                    valid_range.max = 366
                    qa_description = "0: no burn observed"

                elif product == 3:
                    name = "good_looks_count_%d" % year
                    long_name = "number of good looks (pixels with good QA)"
                    file_name = "good_looks_count_%d.img" % year
                    category = "qa"
                    data_units = "count"
                    valid_range.min = 0
                    valid_range.max = 366
                    qa_description = "0: no valid pixels (water, cloud, " \
                        "snow, etc.)"

                elif product == 4:
                    name = "max_burn_prob_%d" % year
                    long_name = "maximum probability for burned area"
                    file_name = "max_burn_prob_%d.img" % year
                    category = "image"
                    data_units = "probability"
                    valid_range.min = 0
                    valid_range.max = 100
                    qa_description = "-9998: bad QA (water, cloud, snow, etc.)"

                myband.set_name(name)
                myband.set_long_name(long_name)
                myband.set_file_name(file_name)
                myband.set_category(category)
                myband.set_data_units(data_units)
                myband.set_valid_range(valid_range)
                myband.set_qa_description(qa_description)

                # increment the band counter
                band_count += 1

            # end for year
        # end for nproducts

        # Write out the XML file, with validation performed by the export
        try:
            fd = open(output_xml_file, 'w')
        except IOError:
            msg = ("Unable to open the output XML file (%s) for writing."
                   % output_xml_file)
            logIt(msg, log_handler)
            return ERROR

        metadata_api.export(fd, xml)
        fd.flush()
        fd.close()

        return SUCCESS
Example #8
    def generate_data(self):
        '''
        Description:
            Provides the main processing algorithm for building the Surface 
            Temperature product.  It produces the final ST product.
        '''

        try:
            self.retrieve_metadata_information()
        except Exception:
            self.logger.exception('Failed reading input XML metadata file')
            raise

        # Register all the gdal drivers and choose the ENVI for our output
        gdal.AllRegister()
        envi_driver = gdal.GetDriverByName('ENVI')

        # Read the bands into memory

        # Landsat Radiance at sensor for thermal band
        self.logger.info('Loading intermediate thermal band data [{0}]'.format(
            self.thermal_name))
        dataset = gdal.Open(self.thermal_name)
        x_dim = dataset.RasterXSize  # They are all the same size
        y_dim = dataset.RasterYSize

        thermal_data = dataset.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)

        # Atmospheric transmittance
        self.logger.info(
            'Loading intermediate transmittance band data [{0}]'.format(
                self.transmittance_name))
        dataset = gdal.Open(self.transmittance_name)
        trans_data = dataset.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)

        # Atmospheric path radiance - upwelled radiance
        self.logger.info(
            'Loading intermediate upwelled band data [{0}]'.format(
                self.upwelled_name))
        dataset = gdal.Open(self.upwelled_name)
        upwelled_data = dataset.GetRasterBand(1).ReadAsArray(
            0, 0, x_dim, y_dim)

        self.logger.info('Calculating surface radiance')
        # Surface radiance
        with np.errstate(invalid='ignore'):
            surface_radiance = (thermal_data - upwelled_data) / trans_data

        # Fix the no data locations
        no_data_locations = np.where(thermal_data == self.no_data_value)
        surface_radiance[no_data_locations] = self.no_data_value

        no_data_locations = np.where(trans_data == self.no_data_value)
        surface_radiance[no_data_locations] = self.no_data_value

        no_data_locations = np.where(upwelled_data == self.no_data_value)
        surface_radiance[no_data_locations] = self.no_data_value

        # Memory cleanup
        del thermal_data
        del trans_data
        del upwelled_data
        del no_data_locations

        # Downwelling sky irradiance
        self.logger.info(
            'Loading intermediate downwelled band data [{0}]'.format(
                self.downwelled_name))
        dataset = gdal.Open(self.downwelled_name)
        downwelled_data = dataset.GetRasterBand(1).ReadAsArray(
            0, 0, x_dim, y_dim)

        # Landsat emissivity estimated from ASTER GED data
        self.logger.info(
            'Loading intermediate emissivity band data [{0}]'.format(
                self.emissivity_name))
        dataset = gdal.Open(self.emissivity_name)
        emissivity_data = dataset.GetRasterBand(1).ReadAsArray(
            0, 0, x_dim, y_dim)

        # Save for the output product
        ds_srs = osr.SpatialReference()
        ds_srs.ImportFromWkt(dataset.GetProjection())
        ds_transform = dataset.GetGeoTransform()

        # Memory cleanup
        del dataset

        # Estimate Earth-emitted radiance by subtracting off the reflected
        # downwelling component
        radiance = (surface_radiance -
                    (1.0 - emissivity_data) * downwelled_data)

        # Account for surface emissivity to get Planck emitted radiance
        self.logger.info('Calculating Planck emitted radiance')
        with np.errstate(invalid='ignore'):
            radiance_emitted = radiance / emissivity_data

        # Fix the no data locations
        no_data_locations = np.where(surface_radiance == self.no_data_value)
        radiance_emitted[no_data_locations] = self.no_data_value

        no_data_locations = np.where(downwelled_data == self.no_data_value)
        radiance_emitted[no_data_locations] = self.no_data_value

        no_data_locations = np.where(emissivity_data == self.no_data_value)
        radiance_emitted[no_data_locations] = self.no_data_value

        # Memory cleanup
        del downwelled_data
        del emissivity_data
        del surface_radiance
        del radiance
        del no_data_locations

        # Use Brightness Temperature LUT to get skin temperature
        # Read the correct one for what we are processing
        if self.satellite == 'LANDSAT_8':
            self.logger.info('Using Landsat 8 Brightness Temperature LUT')
            bt_name = 'L8_Brightness_Temperature_LUT.txt'

        elif self.satellite == 'LANDSAT_7':
            self.logger.info('Using Landsat 7 Brightness Temperature LUT')
            bt_name = 'L7_Brightness_Temperature_LUT.txt'

        elif self.satellite == 'LANDSAT_5':
            self.logger.info('Using Landsat 5 Brightness Temperature LUT')
            bt_name = 'L5_Brightness_Temperature_LUT.txt'

        elif self.satellite == 'LANDSAT_4':
            self.logger.info('Using Landsat 4 Brightness Temperature LUT')
            bt_name = 'L4_Brightness_Temperature_LUT.txt'

        else:
            raise Exception('Unsupported satellite [{0}]'
                            .format(self.satellite))

        bt_data = np.loadtxt(os.path.join(self.st_data_dir, bt_name),
                             dtype=float,
                             delimiter=' ')
        bt_radiance_lut = bt_data[:, 1]
        bt_temp_lut = bt_data[:, 0]

        self.logger.info('Generating ST results')
        st_data = np.interp(radiance_emitted, bt_radiance_lut, bt_temp_lut)

        # Scale the result
        st_data = st_data * MULT_FACTOR

        # Add the fill and scan gaps back into the results, since they may
        # have been lost
        self.logger.info('Adding fill and data gaps back into the Surface'
                         ' Temperature results')

        # Fix the no data locations
        no_data_locations = np.where(radiance_emitted == self.no_data_value)
        st_data[no_data_locations] = self.no_data_value

        # Memory cleanup
        del radiance_emitted
        del no_data_locations

        product_id = self.xml_filename.split('.xml')[0]
        st_img_filename = ''.join([product_id, '_st', '.img'])
        st_hdr_filename = ''.join([product_id, '_st', '.hdr'])
        st_aux_filename = ''.join([st_img_filename, '.aux', '.xml'])

        self.logger.info('Creating {0}'.format(st_img_filename))
        util.Geo.generate_raster_file(envi_driver, st_img_filename, st_data,
                                      x_dim, y_dim, ds_transform,
                                      ds_srs.ExportToWkt(), self.no_data_value,
                                      gdal.GDT_Int16)

        self.logger.info('Updating {0}'.format(st_hdr_filename))
        util.Geo.update_envi_header(st_hdr_filename, self.no_data_value)

        # Memory cleanup
        del ds_srs
        del ds_transform

        # Remove the *.aux.xml file generated by GDAL
        if os.path.exists(st_aux_filename):
            os.unlink(st_aux_filename)

        self.logger.info('Adding {0} to {1}'.format(st_img_filename,
                                                    self.xml_filename))
        # Add the estimated Surface Temperature product to the metadata
        espa_xml = metadata_api.parse(self.xml_filename, silence=True)
        bands = espa_xml.get_bands()
        sensor_code = product_id[0:4]

        # Find the TOA Band 1 to use for the specific band details
        base_band = None
        for band in bands.band:
            if band.product == 'toa_refl' and band.name == 'toa_band1':
                base_band = band

        if base_band is None:
            raise Exception('Failed to find the TOA band 1'
                            ' in the input data')

        st_band = metadata_api.band(product='st',
                                    source='toa_refl',
                                    name='surface_temperature',
                                    category='image',
                                    data_type='INT16',
                                    scale_factor=SCALE_FACTOR,
                                    add_offset=0,
                                    nlines=base_band.get_nlines(),
                                    nsamps=base_band.get_nsamps(),
                                    fill_value=str(self.no_data_value))

        st_band.set_short_name('{0}ST'.format(sensor_code))
        st_band.set_long_name('Surface Temperature')
        st_band.set_file_name(st_img_filename)
        st_band.set_data_units('temperature (kelvin)')

        pixel_size = metadata_api.pixel_size(base_band.pixel_size.x,
                                             base_band.pixel_size.y,
                                             base_band.pixel_size.units)
        st_band.set_pixel_size(pixel_size)

        st_band.set_resample_method('none')

        valid_range = metadata_api.valid_range(min=1500, max=3730)
        st_band.set_valid_range(valid_range)

        # Set the date, but first clean the microseconds off of it
        production_date = (datetime.datetime.strptime(
            datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
            '%Y-%m-%dT%H:%M:%S'))

        st_band.set_production_date(production_date)

        st_band.set_app_version(util.Version.app_version())

        bands.add_band(st_band)

        # Write the XML metadata file out
        with open(self.xml_filename, 'w') as metadata_fd:
            metadata_api.export(metadata_fd, espa_xml)

        # Memory cleanup
        del st_band
        del bands
        del espa_xml
        del st_data
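
# A minimal, hypothetical usage sketch (not part of the original module):
# converting the scaled INT16 ST band back to Kelvin.  It assumes
# MULT_FACTOR above is 10 (consistent with the 1500-3730 valid range being
# Kelvin * 10) and a -9999 fill value; both are assumptions, not values
# confirmed by this snippet.
import numpy as np

def scaled_st_to_kelvin(st_data, scale_factor=0.1, no_data_value=-9999):
    '''Undo the INT16 scaling while preserving the fill locations.'''
    kelvin = st_data.astype(np.float32) * scale_factor
    kelvin[st_data == no_data_value] = no_data_value
    return kelvin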
Example #9
#! /usr/bin/env python

import datetime
from lxml import etree

import metadata_api

bands_element_path = '{http://espa.cr.usgs.gov/v1.0}bands'

xml = metadata_api.parse('LT50460282002042EDC01.xml', silence=True)

bands = xml.get_bands()

# Remove the L1T bands by creating a new list of all the others
bands.band[:] = [band for band in bands.band if band.product != 'L1T']

band = metadata_api.band(product="RDD",
                         name="band1",
                         category="image",
                         data_type="UINT8",
                         nlines="7321",
                         nsamps="7951",
                         fill_value="0")

band.set_short_name("LT5DN")
band.set_long_name("band 1 digital numbers")
band.set_file_name("LT50460282002042EDC01_B1.img")

pixel_size = metadata_api.pixel_size("30.000000", 30, "meters")
band.set_pixel_size(pixel_size)
    def retrieve_metadata_information(self):
        '''
        Description:
            Loads and reads required information from the metadata XML file.
        '''

        # Read the XML metadata
        espa_xml = metadata_api.parse(self.xml_filename, silence=True)
        # Grab the global metadata object
        gm = espa_xml.get_global_metadata()
        # Grab the bands metadata object
        bands = espa_xml.get_bands()

        self.thermal_name = ''
        self.transmittance_name = ''
        self.upwelled_name = ''
        self.downwelled_name = ''
        self.emissivity_name = ''

        # Find the TOA bands to extract information from
        for band in bands.band:
            if (band.product == 'lst_temp' and
                    band.name == 'lst_thermal_radiance'):
                self.thermal_name = band.get_file_name()

            if (band.product == 'lst_temp' and
                    band.name == 'lst_atmospheric_transmittance'):
                self.transmittance_name = band.get_file_name()

            if (band.product == 'lst_temp' and
                    band.name == 'lst_upwelled_radiance'):
                self.upwelled_name = band.get_file_name()

            if (band.product == 'lst_temp' and
                    band.name == 'lst_downwelled_radiance'):
                self.downwelled_name = band.get_file_name()

            if (band.product == 'lst_temp' and
                    band.name == 'landsat_emis'):
                self.emissivity_name = band.get_file_name()

        # Error if we didn't find the required TOA bands in the data
        if len(self.thermal_name) <= 0:
            raise Exception('Failed to find the lst_thermal_radiance band'
                            ' in the input data')
        if len(self.transmittance_name) <= 0:
            raise Exception('Failed to find the lst_atmospheric_transmittance'
                            ' in the input data')
        if len(self.upwelled_name) <= 0:
            raise Exception('Failed to find the lst_upwelled_radiance'
                            ' in the input data')
        if len(self.downwelled_name) <= 0:
            raise Exception('Failed to find the lst_downwelled_radiance'
                            ' in the input data')
        if len(self.emissivity_name) <= 0:
            raise Exception('Failed to find the landsat_emis'
                            ' in the input data')

        # Save for later
        self.satellite = gm.satellite

        del (bands)
        del (gm)
        del (espa_xml)
    def extract_aux_data(self):
        '''
        Description:
            Builds the strings required to locate the auxiliary data in the
            archive then extracts the parameters into parameter named
            directories.
        '''

        xml = metadata_api.parse(self.xml_filename, silence=True)
        global_metadata = xml.get_global_metadata()
        acq_date = str(global_metadata.get_acquisition_date())
        scene_center_time = str(global_metadata.get_scene_center_time())

        # Extract the individual parts from the date
        year = int(acq_date[:4])
        month = int(acq_date[5:7])
        day = int(acq_date[8:])

        # Extract the hour parts from the time and convert to an int
        hour = int(scene_center_time[:2])
        self.logger.debug('Using Acq. Date = {0} {1} {2}'
                          .format(year, month, day))
        self.logger.debug('Using Scene Center Hour = {0:0>2}'.format(hour))

        del global_metadata
        del xml

        # Determine the 3hr increments to use from the auxiliary data
        # We want the one before and after the scene acquisition time
        # and convert back to formatted strings
        hour_1 = hour - (hour % 3)
        t_delta = timedelta(hours=3)  # allows easy advance to the next day

        date_1 = datetime(year, month, day, hour_1)
        date_2 = date_1 + t_delta
        self.logger.debug('Date 1 = {0}'.format(str(date_1)))
        self.logger.debug('Date 2 = {0}'.format(str(date_2)))

        for parm in self.parms_to_extract:
            # Build the source filenames for date 1
            filename = self.aux_name_template.format(parm,
                                                     date_1.year,
                                                     date_1.month,
                                                     date_1.day,
                                                     date_1.hour * 100,
                                                     'hdr')

            aux_path = (self.aux_path_template.format(date_1.year,
                                                      date_1.month,
                                                      date_1.day))

            hdr_1_path = self.dir_template.format(aux_path, filename)

            grb_1_path = hdr_1_path.replace('.hdr', '.grb')

            self.logger.info('Using {0}'.format(hdr_1_path))
            self.logger.info('Using {0}'.format(grb_1_path))

            # Build the source filenames for date 2
            filename = self.aux_name_template.format(parm,
                                                     date_2.year,
                                                     date_2.month,
                                                     date_2.day,
                                                     date_2.hour * 100,
                                                     'hdr')

            aux_path = (self.aux_path_template.format(date_2.year,
                                                      date_2.month,
                                                      date_2.day))

            hdr_2_path = self.dir_template.format(aux_path, filename)

            grb_2_path = hdr_2_path.replace('.hdr', '.grb')

            self.logger.info('Using {0}'.format(hdr_2_path))
            self.logger.info('Using {0}'.format(grb_2_path))

            # Verify that the files we need exist
            if (not os.path.exists(hdr_1_path) or
                    not os.path.exists(hdr_2_path) or
                    not os.path.exists(grb_1_path) or
                    not os.path.exists(grb_2_path)):
                raise Exception('Required LST AUX files are missing')

            # Date 1
            output_dir = '{0}_1'.format(parm)
            self.extract_grib_data(hdr_1_path, grb_1_path, output_dir)

            # Date 2
            output_dir = '{0}_2'.format(parm)
            self.extract_grib_data(hdr_2_path, grb_2_path, output_dir)
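
# A small, self-contained illustration of the 3-hour bracketing above
# (hypothetical date and hour, not taken from the original class): a 14:00
# scene center time falls between the 12:00 and 15:00 NARR increments.
from datetime import datetime, timedelta

hour = 14                             # scene center hour
hour_1 = hour - (hour % 3)            # 12 -> the increment before acquisition
date_1 = datetime(2002, 2, 11, hour_1)
date_2 = date_1 + timedelta(hours=3)  # the increment after acquisition
print(date_1, date_2)  # 2002-02-11 12:00:00 2002-02-11 15:00:00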
    def generate_data(self):
        '''
        Description:
            Provides the main processing algorithm for building the Land
            Surface Temperature product.  It produces the final LST product.
        '''

        try:
            self.retrieve_metadata_information()
        except Exception:
            self.logger.exception('Failed reading input XML metadata file')
            raise

        # Register all the gdal drivers and choose the ENVI for our output
        gdal.AllRegister()
        envi_driver = gdal.GetDriverByName('ENVI')

        # Read the bands into memory

        # Landsat Radiance at sensor for thermal band
        self.logger.info('Loading intermediate thermal band data')
        ds = gdal.Open(self.thermal_name)
        x_dim = ds.RasterXSize  # They are all the same size
        y_dim = ds.RasterYSize
        thermal_data = ds.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)

        # Atmospheric transmittance
        self.logger.info('Loading intermediate transmittance band data')
        ds = gdal.Open(self.transmittance_name)
        trans_data = ds.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)

        # Atmospheric path radiance - upwelled radiance
        self.logger.info('Loading intermediate upwelled band data')
        ds = gdal.Open(self.upwelled_name)
        upwelled_data = ds.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)

        self.logger.info('Calculating surface radiance')
        # Surface radiance
        with np.errstate(invalid='ignore'):
            surface_radiance = (thermal_data - upwelled_data) / trans_data

        # Fix the no data locations
        no_data_locations = np.where(thermal_data == self.no_data_value)
        surface_radiance[no_data_locations] = self.no_data_value

        no_data_locations = np.where(trans_data == self.no_data_value)
        surface_radiance[no_data_locations] = self.no_data_value

        no_data_locations = np.where(upwelled_data == self.no_data_value)
        surface_radiance[no_data_locations] = self.no_data_value

        # Memory cleanup
        del (thermal_data)
        del (trans_data)
        del (upwelled_data)
        del (no_data_locations)

        # Downwelling sky irradiance
        self.logger.info('Loading intermediate downwelled band data')
        ds = gdal.Open(self.downwelled_name)
        downwelled_data = ds.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)

        # Landsat emissivity estimated from ASTER GED data
        self.logger.info('Loading intermediate emissivity band data')
        ds = gdal.Open(self.emissivity_name)
        emissivity_data = ds.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)

        # Save for the output product
        ds_srs = osr.SpatialReference()
        ds_srs.ImportFromWkt(ds.GetProjection())
        ds_transform = ds.GetGeoTransform()

        # Memory cleanup
        del (ds)

        # Estimate Earth-emitted radiance by subtracting off the reflected
        # downwelling component
        radiance = (surface_radiance -
                    (1.0 - emissivity_data) * downwelled_data)

        # Account for surface emissivity to get Planck emitted radiance
        self.logger.info('Calculating Planck emitted radiance')
        with np.errstate(invalid='ignore'):
            radiance_emitted = radiance / emissivity_data

        # Fix the no data locations
        no_data_locations = np.where(surface_radiance == self.no_data_value)
        radiance_emitted[no_data_locations] = self.no_data_value

        no_data_locations = np.where(downwelled_data == self.no_data_value)
        radiance_emitted[no_data_locations] = self.no_data_value

        no_data_locations = np.where(emissivity_data == self.no_data_value)
        radiance_emitted[no_data_locations] = self.no_data_value

        # Memory cleanup
        del (downwelled_data)
        del (emissivity_data)
        del (surface_radiance)
        del (radiance)
        del (no_data_locations)

        # Use Brightness Temperature LUT to get skin temperature
        # Read the correct one for what we are processing
        if self.satellite == 'LANDSAT_7':
            self.logger.info('Using Landsat 7 Brightness Temperature LUT')
            bt_name = 'L7_Brightness_Temperature_LUT.txt'

        elif self.satellite == 'LANDSAT_5':
            self.logger.info('Using Landsat 5 Brightness Temperature LUT')
            bt_name = 'L5_Brightness_Temperature_LUT.txt'

        else:
            raise Exception('Unsupported satellite [{0}]'
                            .format(self.satellite))

        bt_data = np.loadtxt(os.path.join(self.lst_data_dir, bt_name),
                             dtype=float, delimiter=' ')
        bt_radiance_LUT = bt_data[:, 1]
        bt_temp_LUT = bt_data[:, 0]

        self.logger.info('Generating LST results')
        lst_data = np.interp(radiance_emitted, bt_radiance_LUT, bt_temp_LUT)

        # Scale the result and convert it to an int16
        lst_data = lst_data * MULT_FACTOR
        lst_data = lst_data.astype(np.int16)

        # Add the fill and scan gaps back into the results, since they may
        # have been lost
        self.logger.info('Adding fill and data gaps back into the Land'
                         ' Surface Temperature results')

        # Fix the no data locations
        no_data_locations = np.where(radiance_emitted == self.no_data_value)
        lst_data[no_data_locations] = self.no_data_value

        # Memory cleanup
        del (radiance_emitted)
        del (no_data_locations)

        product_id = self.xml_filename.split('.xml')[0]
        lst_img_filename = ''.join([product_id, '_lst', '.img'])
        lst_hdr_filename = ''.join([product_id, '_lst', '.hdr'])
        lst_aux_filename = ''.join([lst_img_filename, '.aux', '.xml'])

        self.logger.info('Creating {0}'.format(lst_img_filename))
        util.Geo.generate_raster_file(envi_driver, lst_img_filename,
                                      lst_data, x_dim, y_dim, ds_transform,
                                      ds_srs.ExportToWkt(), self.no_data_value,
                                      gdal.GDT_Int16)

        self.logger.info('Updating {0}'.format(lst_hdr_filename))
        util.Geo.update_envi_header(lst_hdr_filename, self.no_data_value)

        # Memory cleanup
        del (ds_srs)
        del (ds_transform)

        # Remove the *.aux.xml file generated by GDAL
        if os.path.exists(lst_aux_filename):
            os.unlink(lst_aux_filename)

        self.logger.info('Adding {0} to {1}'.format(lst_img_filename,
                                                    self.xml_filename))
        # Add the estimated Land Surface Temperature product to the metadata
        espa_xml = metadata_api.parse(self.xml_filename, silence=True)
        bands = espa_xml.get_bands()
        sensor_code = product_id[0:3]

        # Find the TOA Band 1 to use for the specific band details
        base_band = None
        for band in bands.band:
            if band.product == 'toa_refl' and band.name == 'toa_band1':
                base_band = band

        if base_band is None:
            raise Exception('Failed to find the TOA BLUE band'
                            ' in the input data')

        lst_band = metadata_api.band(product='lst',
                                     source='toa_refl',
                                     name='land_surface_temperature',
                                     category='image',
                                     data_type='INT16',
                                     scale_factor=SCALE_FACTOR,
                                     add_offset=0,
                                     nlines=base_band.get_nlines(),
                                     nsamps=base_band.get_nsamps(),
                                     fill_value=str(self.no_data_value))

        lst_band.set_short_name('{0}LST'.format(sensor_code))
        lst_band.set_long_name('Land Surface Temperature')
        lst_band.set_file_name(lst_img_filename)
        lst_band.set_data_units('temperature (kelvin)')

        pixel_size = metadata_api.pixel_size(base_band.pixel_size.x,
                                             base_band.pixel_size.y,
                                             base_band.pixel_size.units)
        lst_band.set_pixel_size(pixel_size)

        valid_range = metadata_api.valid_range(min=1500, max=3730)
        lst_band.set_valid_range(valid_range)

        # Set the date, but first clean the microseconds off of it
        production_date = (
            datetime.datetime.strptime(datetime.datetime.now().
                                       strftime('%Y-%m-%dT%H:%M:%S'),
                                       '%Y-%m-%dT%H:%M:%S'))

        lst_band.set_production_date(production_date)

        lst_band.set_app_version(util.Version.app_version())

        bands.add_band(lst_band)

        # Write the XML metadata file out
        with open(self.xml_filename, 'w') as fd:
            metadata_api.export(fd, espa_xml)

        # Memory cleanup
        del (lst_band)
        del (bands)
        del (espa_xml)
        del (lst_data)
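
# A toy illustration of the brightness-temperature lookup used above
# (made-up LUT values, not a real Landsat LUT): np.interp maps each emitted
# radiance onto temperature by linear interpolation between LUT entries,
# which is how the *_Brightness_Temperature_LUT.txt columns are applied.
import numpy as np

bt_temp_lut = np.array([240.0, 270.0, 300.0, 330.0])  # kelvin (column 0)
bt_radiance_lut = np.array([3.2, 6.1, 9.5, 13.4])     # radiance (column 1)
radiance_emitted = np.array([4.0, 9.5, 12.0])
print(np.interp(radiance_emitted, bt_radiance_lut, bt_temp_lut))
# -> approximately [248.3, 300.0, 319.2]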
Example #13
from argparse import ArgumentParser

from lxml import etree

import metadata_api

if __name__ == '__main__':
    # Create a command line argument parser
    description = "Validate an ESPA XML using two methods"
    parser = ArgumentParser(description=description)

    parser.add_argument('--xml-file',
                        action='store', dest='xml_file', required=True,
                        help="ESPA XML file to validate")

    # Parse the command line arguments
    args = parser.parse_args()

    xml = metadata_api.parse(args.xml_file, silence=True)


    # Export with validation; create the file and specify the
    # namespace/schema
    with open('val_01-' + args.xml_file, 'w') as f:
        metadata_api.export(f, xml)


    # LXML - Validation Example
    with open('../../../htdocs/schema/espa_internal_metadata_v1_0.xsd') as f:
        schema_root = etree.parse(f)
    schema = etree.XMLSchema(schema_root)
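
    # The original example is truncated here by the listing; a hedged sketch
    # of how an lxml schema check typically concludes (standard lxml API,
    # not recovered from the original source):
    doc = etree.parse(args.xml_file)
    if not schema.validate(doc):
        print(schema.error_log)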
Example #14
#! /usr/bin/env python

import datetime
from lxml import etree

import metadata_api


bands_element_path = '{http://espa.cr.usgs.gov/v1.0}bands'

xml = metadata_api.parse('LT50460282002042EDC01.xml', silence=True)

bands = xml.get_bands()

# Remove the L1T bands by creating a new list of all the others
bands.band[:] = [band for band in bands.band if band.product != 'L1T']

band = metadata_api.band(product="RDD", name="band1", category="image",
    data_type="UINT8", nlines="7321", nsamps="7951", fill_value="0")

band.set_short_name ("LT5DN")
band.set_long_name ("band 1 digital numbers")
band.set_file_name ("LT50460282002042EDC01_B1.img")

pixel_size = metadata_api.pixel_size ("30.000000", 30, "meters")
band.set_pixel_size (pixel_size)

band.set_data_units ("digital numbers")

valid_range = metadata_api.valid_range (min="1", max=255)
band.set_valid_range (valid_range)
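
# A hedged continuation (not in the original snippet): attach the new band
# and write the modified document back out, using the same add_band()/
# export() pattern the other examples on this page use.  The output
# filename is hypothetical.
bands.add_band(band)

with open('LT50460282002042EDC01-modified.xml', 'w') as fd:
    metadata_api.export(fd, xml)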
def extract_aux_data(args, base_aux_dir):
    '''
    Description:
        Builds the strings required to locate the auxiliary data in the
        archive then extracts the parameters into parameter named directories.
    '''

    logger = logging.getLogger(__name__)

    xml = metadata_api.parse(args.xml_filename, silence=True)
    global_metadata = xml.get_global_metadata()
    acq_date = str(global_metadata.get_acquisition_date())
    scene_center_time = str(global_metadata.get_scene_center_time())

    # Extract the individual parts from the date
    year = int(acq_date[:4])
    month = int(acq_date[5:7])
    day = int(acq_date[8:])

    # Extract the hour parts from the time and convert to an int
    hour = int(scene_center_time[:2])
    logger.debug("Using Acq. Date = {0} {1} {2}".format(year, month, day))
    logger.debug("Using Scene Center Hour = {0:0>2}".format(hour))

    del (global_metadata)
    del (xml)

    # Determine the 3hr increments to use from the auxiliary data
    # We want the one before and after the scene acquisition time
    # and convert back to formatted strings
    hour_1 = hour - (hour % 3)
    td = timedelta(hours=3)  # allows us to easily advance to the next day

    date_1 = datetime(year, month, day, hour_1)
    date_2 = date_1 + td
    logger.debug("Date 1 = {0}".format(str(date_1)))
    logger.debug("Date 2 = {0}".format(str(date_2)))

    parms_to_extract = ['HGT', 'SPFH', 'TMP']
    AUX_PATH_TEMPLATE = '{0:0>4}/{1:0>2}/{2:0>2}'
    AUX_NAME_TEMPLATE = 'narr-a_221_{0}_{1:0>2}00_000_{2}.{3}'

    for parm in parms_to_extract:
        # Build the source filenames for date 1
        yyyymmdd = '{0:0>4}{1:0>2}{2:0>2}'.format(date_1.year,
                                                  date_1.month,
                                                  date_1.day)
        logger.debug("Date 1 yyyymmdd = {0}".format(yyyymmdd))

        hdr_1_name = AUX_NAME_TEMPLATE.format(yyyymmdd, date_1.hour,
                                              parm, 'hdr')
        grb_1_name = AUX_NAME_TEMPLATE.format(yyyymmdd, date_1.hour,
                                              parm, 'grb')
        logger.debug("hdr 1 = {0}".format(hdr_1_name))
        logger.debug("grb 1 = {0}".format(grb_1_name))

        tmp = AUX_PATH_TEMPLATE.format(date_1.year, date_1.month, date_1.day)
        hdr_1_path = '{0}/{1}/{2}'.format(base_aux_dir, tmp, hdr_1_name)
        grb_1_path = '{0}/{1}/{2}'.format(base_aux_dir, tmp, grb_1_name)
        logger.info("Using {0}".format(hdr_1_path))
        logger.info("Using {0}".format(grb_1_path))

        # Build the source filenames for date 2
        yyyymmdd = '{0:0>4}{1:0>2}{2:0>2}'.format(date_2.year,
                                                  date_2.month,
                                                  date_2.day)
        logger.debug("Date 2 yyyymmdd = {0}".format(yyyymmdd))

        hdr_2_name = AUX_NAME_TEMPLATE.format(yyyymmdd, date_2.hour,
                                              parm, 'hdr')
        grb_2_name = AUX_NAME_TEMPLATE.format(yyyymmdd, date_2.hour,
                                              parm, 'grb')
        logger.debug("hdr 2 = {0}".format(hdr_2_name))
        logger.debug("grb 2 = {0}".format(grb_2_name))

        tmp = AUX_PATH_TEMPLATE.format(date_2.year, date_2.month, date_2.day)
        hdr_2_path = '{0}/{1}/{2}'.format(base_aux_dir, tmp, hdr_2_name)
        grb_2_path = '{0}/{1}/{2}'.format(base_aux_dir, tmp, grb_2_name)
        logger.info("Using {0}".format(hdr_2_path))
        logger.info("Using {0}".format(grb_2_path))

        # Verify that the files we need exist
        if (not os.path.exists(hdr_1_path)
                or not os.path.exists(hdr_2_path)
                or not os.path.exists(grb_1_path)
                or not os.path.exists(grb_2_path)):
            raise Exception("Required LST AUX files are missing")

        output_dir = '{0}_1'.format(parm)
        extract_grib_data(hdr_1_path, grb_1_path, output_dir)
        output_dir = '{0}_2'.format(parm)
        extract_grib_data(hdr_2_path, grb_2_path, output_dir)
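
# A quick, self-contained illustration of the two templates above with
# hypothetical values (the same format strings the function builds its
# paths from):
AUX_PATH_TEMPLATE = '{0:0>4}/{1:0>2}/{2:0>2}'
AUX_NAME_TEMPLATE = 'narr-a_221_{0}_{1:0>2}00_000_{2}.{3}'

print(AUX_PATH_TEMPLATE.format(2002, 2, 11))                   # 2002/02/11
print(AUX_NAME_TEMPLATE.format('20020211', 12, 'TMP', 'grb'))
# narr-a_221_20020211_1200_000_TMP.grb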
    def retrieve_metadata_information(self):
        '''
        Description:
            Loads and reads required information from the metadata XML file.
        '''

        # Read the XML metadata
        espa_xml = metadata_api.parse(self.xml_filename, silence=True)
        # Grab the global metadata object
        gm = espa_xml.get_global_metadata()
        # Grab the bands metadata object
        bands = espa_xml.get_bands()

        toa_bt_name = ''  # Only one that is local

        # Find the TOA bands to extract information from
        for band in bands.band:
            if band.product == 'toa_refl' and band.name == 'toa_band2':
                self.toa_green_name = band.get_file_name()
                self.toa_green_scale_factor = float(band.scale_factor)

            if band.product == 'toa_refl' and band.name == 'toa_band3':
                self.toa_red_name = band.get_file_name()
                self.toa_red_scale_factor = float(band.scale_factor)

            if band.product == 'toa_refl' and band.name == 'toa_band4':
                self.toa_nir_name = band.get_file_name()
                self.toa_nir_scale_factor = float(band.scale_factor)

            if band.product == 'toa_refl' and band.name == 'toa_band5':
                self.toa_swir1_name = band.get_file_name()
                self.toa_swir1_scale_factor = float(band.scale_factor)

            if band.product == 'toa_bt' and band.category == 'image':
                # Get the output pixel size
                self.ls_info.x_pixel_size = band.pixel_size.x
                self.ls_info.y_pixel_size = band.pixel_size.y

                toa_bt_name = band.get_file_name()

                # Get the output proj4 string
                self.ls_info.dest_proj4 = (
                    util.Geo.get_proj4_projection_string(toa_bt_name))

        # Error if we didn't find the required TOA bands in the data
        if len(self.toa_green_name) <= 0:
            raise Exception('Failed to find the TOA GREEN band'
                            ' in the input data')
        if len(self.toa_red_name) <= 0:
            raise Exception('Failed to find the TOA RED band'
                            ' in the input data')
        if len(self.toa_nir_name) <= 0:
            raise Exception('Failed to find the TOA NIR band'
                            ' in the input data')
        if len(self.toa_swir1_name) <= 0:
            raise Exception('Failed to find the TOA SWIR1 band'
                            ' in the input data')
        if len(toa_bt_name) <= 0:
            raise Exception('Failed to find the TOA BT band'
                            ' in the input data')

        # Determine the bounding geographic coordinates for the ASTER tiles we
        # will need
        self.ls_info.north = math.ceil(gm.bounding_coordinates.north)
        self.ls_info.south = math.floor(gm.bounding_coordinates.south)
        self.ls_info.east = math.ceil(gm.bounding_coordinates.east)
        self.ls_info.west = math.floor(gm.bounding_coordinates.west)

        # Determine the UTM projection corner points
        for cp in gm.projection_information.corner_point:
            if cp.location == 'UL':
                self.ls_info.min_x_extent = cp.x
                self.ls_info.max_y_extent = cp.y
            if cp.location == 'LR':
                self.ls_info.max_x_extent = cp.x
                self.ls_info.min_y_extent = cp.y

        # Adjust the UTM coordinates for the image extents, because they are
        # pixel-center coordinates and we need to supply the warping with the
        # actual edge extents
        self.ls_info.min_x_extent = (self.ls_info.min_x_extent -
                                     self.ls_info.x_pixel_size * 0.5)
        self.ls_info.max_x_extent = (self.ls_info.max_x_extent +
                                     self.ls_info.x_pixel_size * 0.5)
        self.ls_info.min_y_extent = (self.ls_info.min_y_extent -
                                     self.ls_info.y_pixel_size * 0.5)
        self.ls_info.max_y_extent = (self.ls_info.max_y_extent +
                                     self.ls_info.y_pixel_size * 0.5)

        # Save for later
        self.satellite = gm.satellite

        del bands
        del gm
        del espa_xml
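
# A tiny worked example of the half-pixel adjustment above (hypothetical
# numbers): the XML corner points are pixel centers, so a 30 m grid whose
# UL center sits at x = 300000.0 actually begins 15 m further west.
x_pixel_size = 30.0
min_x_extent = 300000.0 - x_pixel_size * 0.5  # 299985.0, the true left edge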
Example #17
    def extract_aux_data(self):
        '''
        Description:
            Builds the strings required to locate the auxiliary data in the
            archive then extracts the parameters into parameter named
            directories.
        '''

        xml = metadata_api.parse(self.xml_filename, silence=True)
        global_metadata = xml.get_global_metadata()
        acq_date = str(global_metadata.get_acquisition_date())
        scene_center_time = str(global_metadata.get_scene_center_time())

        # Extract the individual parts from the date
        year = int(acq_date[:4])
        month = int(acq_date[5:7])
        day = int(acq_date[8:])

        # Extract the hour parts from the time and convert to an int
        hour = int(scene_center_time[:2])
        self.logger.debug('Using Acq. Date = {0} {1} {2}'
                          .format(year, month, day))
        self.logger.debug('Using Scene Center Hour = {0:0>2}'.format(hour))

        del global_metadata
        del xml

        # Determine the 3hr increments to use from the auxiliary data
        # We want the one before and after the scene acquisition time
        # and convert back to formatted strings
        hour_1 = hour - (hour % 3)
        t_delta = timedelta(hours=3)  # allows easy advance to the next day

        date_1 = datetime(year, month, day, hour_1)
        date_2 = date_1 + t_delta
        self.logger.debug('Date 1 = {0}'.format(str(date_1)))
        self.logger.debug('Date 2 = {0}'.format(str(date_2)))

        for parm in self.parms_to_extract:
            # Build the source filenames for date 1
            filename = self.aux_name_template.format(parm,
                                                     date_1.year,
                                                     date_1.month,
                                                     date_1.day,
                                                     date_1.hour * 100,
                                                     'hdr')

            aux_path = (self.aux_path_template.format(date_1.year,
                                                      date_1.month,
                                                      date_1.day))

            hdr_1_path = self.dir_template.format(aux_path, filename)

            grb_1_path = hdr_1_path.replace('.hdr', '.grb')

            self.logger.info('Using {0}'.format(hdr_1_path))
            self.logger.info('Using {0}'.format(grb_1_path))

            # Build the source filenames for date 2
            filename = self.aux_name_template.format(parm,
                                                     date_2.year,
                                                     date_2.month,
                                                     date_2.day,
                                                     date_2.hour * 100,
                                                     'hdr')

            aux_path = (self.aux_path_template.format(date_2.year,
                                                      date_2.month,
                                                      date_2.day))

            hdr_2_path = self.dir_template.format(aux_path, filename)

            grb_2_path = hdr_2_path.replace('.hdr', '.grb')

            self.logger.info('Using {0}'.format(hdr_2_path))
            self.logger.info('Using {0}'.format(grb_2_path))

            # Verify that the files we need exist
            if (not os.path.exists(hdr_1_path) or
                    not os.path.exists(hdr_2_path) or
                    not os.path.exists(grb_1_path) or
                    not os.path.exists(grb_2_path)):
                raise Exception('Required LST AUX files are missing')

            # Date 1
            output_dir = '{0}_1'.format(parm)
            self.extract_grib_data(hdr_1_path, grb_1_path, output_dir)

            # Date 2
            output_dir = '{0}_2'.format(parm)
            self.extract_grib_data(hdr_2_path, grb_2_path, output_dir)
    def generate_product(self):
        '''
        Description:
            Provides the main processing algorithm for generating the
            estimated Landsat emissivity product.  It produces the final
            emissivity product.
        '''

        self.logger = logging.getLogger(__name__)

        self.logger.info('Start - Estimate Landsat Emissivity')

        try:
            self.retrieve_metadata_information()
        except Exception:
            self.logger.exception('Failed reading input XML metadata file')
            raise

        try:
            self.determine_sensor_specific_coefficients()
        except Exception:
            self.logger.exception('Failed determining sensor coefficients')
            raise

        # Register all the gdal drivers and choose the GeoTiff for our temp
        # output
        gdal.AllRegister()
        geotiff_driver = gdal.GetDriverByName('GTiff')
        envi_driver = gdal.GetDriverByName('ENVI')

        # ====================================================================
        # Build NDVI in memory
        self.logger.info('Building TOA based NDVI band for Landsat data')

        # NIR ----------------------------------------------------------------
        data_set = gdal.Open(self.toa_nir_name)
        x_dim = data_set.RasterXSize  # They are all the same size
        y_dim = data_set.RasterYSize
        ls_nir_data = data_set.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)
        nir_no_data_locations = np.where(ls_nir_data == self.no_data_value)
        ls_nir_data = ls_nir_data * self.toa_nir_scale_factor

        # RED ----------------------------------------------------------------
        data_set = gdal.Open(self.toa_red_name)
        ls_red_data = data_set.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)
        red_no_data_locations = np.where(ls_red_data == self.no_data_value)
        ls_red_data = ls_red_data * self.toa_red_scale_factor

        # NDVI ---------------------------------------------------------------
        with np.errstate(divide='ignore', invalid='ignore'):
            ls_ndvi_data = ((ls_nir_data - ls_red_data) /
                            (ls_nir_data + ls_red_data))

        # Cleanup no data locations
        ls_ndvi_data[nir_no_data_locations] = self.no_data_value
        ls_ndvi_data[red_no_data_locations] = self.no_data_value

        if self.keep_intermediate_data:
            geo_transform = data_set.GetGeoTransform()
            ds_srs = osr.SpatialReference()
            ds_srs.ImportFromWkt(data_set.GetProjection())

        # Memory cleanup
        del ls_red_data
        del ls_nir_data
        del nir_no_data_locations
        del red_no_data_locations

        # ====================================================================
        # Build NDSI in memory
        self.logger.info('Building TOA based NDSI band for Landsat data')

        # GREEN --------------------------------------------------------------
        data_set = gdal.Open(self.toa_green_name)
        ls_green_data = data_set.GetRasterBand(1).ReadAsArray(
            0, 0, x_dim, y_dim)
        green_no_data_locations = (np.where(
            ls_green_data == self.no_data_value))
        ls_green_data = ls_green_data * self.toa_green_scale_factor

        # SWIR1 --------------------------------------------------------------
        data_set = gdal.Open(self.toa_swir1_name)
        ls_swir1_data = data_set.GetRasterBand(1).ReadAsArray(
            0, 0, x_dim, y_dim)
        swir1_no_data_locations = (np.where(
            ls_swir1_data == self.no_data_value))
        ls_swir1_data = ls_swir1_data * self.toa_swir1_scale_factor

        # Build the Landsat TOA NDSI data
        self.logger.info('Building TOA based NDSI for Landsat data')
        with np.errstate(divide='ignore', invalid='ignore'):
            ls_ndsi_data = ((ls_green_data - ls_swir1_data) /
                            (ls_green_data + ls_swir1_data))

        # Cleanup no data locations
        ls_ndsi_data[green_no_data_locations] = self.no_data_value
        ls_ndsi_data[swir1_no_data_locations] = self.no_data_value

        # Memory cleanup
        del ls_green_data
        del ls_swir1_data
        del green_no_data_locations
        del swir1_no_data_locations

        # Save for the output products
        ds_tmp_srs = osr.SpatialReference()
        ds_tmp_srs.ImportFromWkt(data_set.GetProjection())
        ds_tmp_transform = data_set.GetGeoTransform()

        # Memory cleanup
        del data_set

        # Save the locations for the specified snow pixels
        self.logger.info('Determine snow pixel locations')
        selected_snow_locations = np.where(ls_ndsi_data > 0.4)

        # Save ndvi and ndsi no data locations
        ndvi_no_data_locations = np.where(ls_ndvi_data == self.no_data_value)
        ndsi_no_data_locations = np.where(ls_ndsi_data == self.no_data_value)

        # Memory cleanup
        del ls_ndsi_data

        # Turn all negative values to zero
        # Use a really small value so that we don't have negative zero (-0.0)
        ls_ndvi_data[ls_ndvi_data < 0.0000001] = 0

        if self.keep_intermediate_data:
            self.logger.info('Writing Landsat NDVI raster')
            util.Geo.generate_raster_file(geotiff_driver,
                                          'internal_landsat_ndvi.tif',
                                          ls_ndvi_data, x_dim, y_dim,
                                          geo_transform, ds_srs.ExportToWkt(),
                                          self.no_data_value, gdal.GDT_Float32)

        # Build the estimated Landsat EMIS data from the ASTER GED data and
        # warp it to the Landsat scenes projection and image extents
        # For convenience the ASTER NDVI is also extracted and warped to the
        # Landsat scenes projection and image extents
        self.logger.info('Build thermal emissivity band and'
                         ' retrieve ASTER NDVI')
        (ls_emis_warped_name,
         aster_ndvi_warped_name) = self.build_ls_emis_data(geotiff_driver)

        # Load the warped estimated Landsat EMIS into memory
        data_set = gdal.Open(ls_emis_warped_name)
        ls_emis_data = data_set.GetRasterBand(1).ReadAsArray(
            0, 0, x_dim, y_dim)
        ls_emis_gap_locations = np.where(ls_emis_data == 0)
        ls_emis_no_data_locations = (np.where(
            ls_emis_data == self.no_data_value))

        # Load the warped ASTER NDVI into memory
        data_set = gdal.Open(aster_ndvi_warped_name)
        aster_ndvi_data = data_set.GetRasterBand(1).ReadAsArray(
            0, 0, x_dim, y_dim)
        aster_ndvi_gap_locations = np.where(aster_ndvi_data == 0)
        aster_ndvi_no_data_locations = (np.where(
            aster_ndvi_data == self.no_data_value))

        # Turn all negative values to zero
        # Use a really small value so that we don't have negative zero (-0.0)
        aster_ndvi_data[aster_ndvi_data < 0.0000001] = 0

        # Memory cleanup
        del data_set

        if not self.keep_intermediate_data:
            # Cleanup the temp files since we have them in memory
            if os.path.exists(ls_emis_warped_name):
                os.unlink(ls_emis_warped_name)
            if os.path.exists(aster_ndvi_warped_name):
                os.unlink(aster_ndvi_warped_name)

        self.logger.info('Normalizing Landsat and ASTER NDVI')
        # Normalize Landsat NDVI by max value
        max_ls_ndvi = ls_ndvi_data.max()
        self.logger.info('Max LS NDVI {0}'.format(max_ls_ndvi))
        ls_ndvi_data = ls_ndvi_data / float(max_ls_ndvi)

        if self.keep_intermediate_data:
            self.logger.info('Writing Landsat NDVI NORM MAX raster')
            util.Geo.generate_raster_file(
                geotiff_driver, 'internal_landsat_ndvi_norm_max.tif',
                ls_ndvi_data, x_dim, y_dim, geo_transform,
                ds_srs.ExportToWkt(), self.no_data_value, gdal.GDT_Float32)

        # Normalize ASTER NDVI by max value
        max_aster_ndvi = aster_ndvi_data.max()
        self.logger.info('Max ASTER NDVI {0}'.format(max_aster_ndvi))
        aster_ndvi_data = aster_ndvi_data / float(max_aster_ndvi)

        if self.keep_intermediate_data:
            self.logger.info('Writing Aster NDVI NORM MAX raster')
            util.Geo.generate_raster_file(geotiff_driver,
                                          'internal_aster_ndvi_norm_max.tif',
                                          aster_ndvi_data, x_dim, y_dim,
                                          geo_transform, ds_srs.ExportToWkt(),
                                          self.no_data_value, gdal.GDT_Float32)

        # Soil - From prototype code variable name
        self.logger.info('Calculating EMIS Final')
        with np.errstate(divide='ignore'):
            ls_emis_final = ((ls_emis_data - 0.975 * aster_ndvi_data) /
                             (1.0 - aster_ndvi_data))

        # Memory cleanup
        del aster_ndvi_data
        del ls_emis_data

        # Adjust estimated Landsat EMIS for vegetation and snow, to generate
        # the final Landsat EMIS data
        self.logger.info('Adjusting estimated EMIS for vegetation')
        ls_emis_final = (self.vegetation_coeff * ls_ndvi_data + ls_emis_final *
                         (1.0 - ls_ndvi_data))

        # Medium snow
        self.logger.info('Adjusting estimated EMIS for snow')
        ls_emis_final[selected_snow_locations] = self.snow_emis_value

        # Memory cleanup
        del ls_ndvi_data
        del selected_snow_locations

        # Add the fill and scan gaps and ASTER gaps back into the results,
        # since they may have been lost
        self.logger.info('Adding fill and data gaps back into the estimated'
                         ' Landsat emissivity results')
        ls_emis_final[ls_emis_no_data_locations] = self.no_data_value
        ls_emis_final[ls_emis_gap_locations] = self.no_data_value
        ls_emis_final[aster_ndvi_no_data_locations] = self.no_data_value
        ls_emis_final[aster_ndvi_gap_locations] = self.no_data_value
        ls_emis_final[ndvi_no_data_locations] = self.no_data_value
        ls_emis_final[ndsi_no_data_locations] = self.no_data_value

        # Memory cleanup
        del ls_emis_no_data_locations
        del ls_emis_gap_locations
        del aster_ndvi_no_data_locations
        del aster_ndvi_gap_locations

        product_id = self.xml_filename.split('.xml')[0]
        ls_emis_img_filename = ''.join([product_id, '_emis', '.img'])
        ls_emis_hdr_filename = ''.join([product_id, '_emis', '.hdr'])
        ls_emis_aux_filename = ''.join([ls_emis_img_filename, '.aux', '.xml'])

        self.logger.info('Creating {0}'.format(ls_emis_img_filename))
        util.Geo.generate_raster_file(envi_driver, ls_emis_img_filename,
                                      ls_emis_final, x_dim,
                                      y_dim, ds_tmp_transform,
                                      ds_tmp_srs.ExportToWkt(),
                                      self.no_data_value, gdal.GDT_Float32)

        self.logger.info('Updating {0}'.format(ls_emis_hdr_filename))
        util.Geo.update_envi_header(ls_emis_hdr_filename, self.no_data_value)

        # Remove the *.aux.xml file generated by GDAL
        if os.path.exists(ls_emis_aux_filename):
            os.unlink(ls_emis_aux_filename)

        self.logger.info('Adding {0} to {1}'.format(ls_emis_img_filename,
                                                    self.xml_filename))
        # Add the estimated Landsat emissivity to the metadata XML
        espa_xml = metadata_api.parse(self.xml_filename, silence=True)
        bands = espa_xml.get_bands()
        sensor_code = product_id[0:3]
        source_product = 'toa_refl'

        # Find the TOA Band 1 to use for the specific band details
        base_band = None
        for band in bands.band:
            if band.product == source_product and band.name == 'toa_band1':
                base_band = band

        if base_band is None:
            raise Exception('Failed to find the TOA BLUE band'
                            ' in the input data')

        emis_band = metadata_api.band(product='lst_temp',
                                      source=source_product,
                                      name='landsat_emis',
                                      category='image',
                                      data_type='FLOAT32',
                                      nlines=base_band.get_nlines(),
                                      nsamps=base_band.get_nsamps(),
                                      fill_value=str(self.no_data_value))

        emis_band.set_short_name('{0}EMIS'.format(sensor_code))
        emis_band.set_long_name('Landsat emissivity estimated from ASTER GED'
                                ' data')
        emis_band.set_file_name(ls_emis_img_filename)
        emis_band.set_data_units('Emissivity Coefficient')

        pixel_size = metadata_api.pixel_size(base_band.pixel_size.x,
                                             base_band.pixel_size.y,
                                             base_band.pixel_size.units)
        emis_band.set_pixel_size(pixel_size)

        emis_band.set_resample_method('none')

        valid_range = metadata_api.valid_range(min=0.0, max=1.0)
        emis_band.set_valid_range(valid_range)

        # Set the date, but first clean the microseconds off of it
        production_date = (datetime.datetime.strptime(
            datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
            '%Y-%m-%dT%H:%M:%S'))

        emis_band.set_production_date(production_date)

        emis_band.set_app_version(util.Version.app_version())

        bands.add_band(emis_band)

        # Write the XML metadata file out
        with open(self.xml_filename, 'w') as output_fd:
            metadata_api.export(output_fd, espa_xml)

        # Memory cleanup
        del ls_emis_final

        self.logger.info('Completed - Estimate Landsat Emissivity')
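
# The vegetation adjustment above, restated as a standalone sketch (same
# variable meanings as the code; the 0.975 soil coefficient comes from the
# "EMIS Final" step, while vegetation_coeff is sensor-specific and set
# elsewhere in the class, so the 0.985 default is only a placeholder):
import numpy as np

def adjust_emis_for_vegetation(ls_emis, aster_ndvi, ls_ndvi,
                               vegetation_coeff=0.985):
    '''Remove the ASTER vegetation fraction, then re-mix with Landsat NDVI.'''
    with np.errstate(divide='ignore', invalid='ignore'):
        soil = (ls_emis - 0.975 * aster_ndvi) / (1.0 - aster_ndvi)
    return vegetation_coeff * ls_ndvi + soil * (1.0 - ls_ndvi)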
Example #19
    def retrieve_metadata_information(self):
        '''
        Description:
            Loads and reads required information from the metadata XML file.
        '''

        # Read the XML metadata
        espa_xml = metadata_api.parse(self.xml_filename, silence=True)
        # Grab the global metadata object
        global_metadata = espa_xml.get_global_metadata()
        # Grab the bands metadata object
        bands = espa_xml.get_bands()

        self.thermal_name = ''
        self.transmittance_name = ''
        self.upwelled_name = ''
        self.downwelled_name = ''
        self.emissivity_name = ''

        # Find the TOA bands to extract information from
        for band in bands.band:
            if (band.product == 'st_intermediate'
                    and band.name == 'st_thermal_radiance'):
                self.thermal_name = band.get_file_name()

            if (band.product == 'st_intermediate'
                    and band.name == 'st_atmospheric_transmittance'):
                self.transmittance_name = band.get_file_name()

            if (band.product == 'st_intermediate'
                    and band.name == 'st_upwelled_radiance'):
                self.upwelled_name = band.get_file_name()

            if (band.product == 'st_intermediate'
                    and band.name == 'st_downwelled_radiance'):
                self.downwelled_name = band.get_file_name()

            if (band.product == 'st_intermediate' and band.name == 'emis'):
                self.emissivity_name = band.get_file_name()

        # Error if we didn't find the required intermediate bands in the data
        if not self.thermal_name:
            raise Exception('Failed to find the st_thermal_radiance band'
                            ' in the input data')
        if not self.transmittance_name:
            raise Exception('Failed to find the st_atmospheric_transmittance'
                            ' band in the input data')
        if not self.upwelled_name:
            raise Exception('Failed to find the st_upwelled_radiance band'
                            ' in the input data')
        if not self.downwelled_name:
            raise Exception('Failed to find the st_downwelled_radiance band'
                            ' in the input data')
        if not self.emissivity_name:
            raise Exception('Failed to find the emis band in the input data')

        # Save for later
        self.satellite = global_metadata.satellite

        del bands
        del global_metadata
        del espa_xml
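
# --- Not part of the original example: a table-driven alternative sketch ---
# The repeated band checks above could be collapsed into a single mapping
# from expected band name to the attribute it populates. '_ST_BAND_ATTRS'
# and '_collect_st_band_filenames' are hypothetical names.
_ST_BAND_ATTRS = {
    'st_thermal_radiance': 'thermal_name',
    'st_atmospheric_transmittance': 'transmittance_name',
    'st_upwelled_radiance': 'upwelled_name',
    'st_downwelled_radiance': 'downwelled_name',
    'emis': 'emissivity_name',
}


def _collect_st_band_filenames(obj, bands):
    # Populate obj.<attr> with the file name of each matching band
    for band in bands.band:
        if band.product == 'st_intermediate' and band.name in _ST_BAND_ATTRS:
            setattr(obj, _ST_BAND_ATTRS[band.name], band.get_file_name())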


def read_info_from_metadata(xml_filename):
    '''
    Description:
        Reads the TOA band file names, scale factors, pixel sizes, and
        geographic extents from the metadata XML file.
    '''

    # Read the XML metadata
    espa_xml = metadata_api.parse(xml_filename, silence=True)
    # Grab the global metadata object
    gm = espa_xml.get_global_metadata()
    # Grab the bands metadata object
    bands = espa_xml.get_bands()

    ls_info = LandsatInfo()

    toa_bt_name = ""
    toa_green_name = ""
    toa_red_name = ""
    toa_nir_name = ""
    toa_swir1_name = ""
    toa_green_scale_factor = 1.0
    toa_red_scale_factor = 1.0
    toa_nir_scale_factor = 1.0
    toa_swir1_scale_factor = 1.0

    # Find the TOA bands to extract information from
    for band in bands.band:
        if band.product == "toa_refl" and band.name == "toa_band2":
            toa_green_name = band.get_file_name()
            toa_green_scale_factor = float(band.scale_factor)

        elif band.product == "toa_refl" and band.name == "toa_band3":
            toa_red_name = band.get_file_name()
            toa_red_scale_factor = float(band.scale_factor)

        elif band.product == "toa_refl" and band.name == "toa_band4":
            toa_nir_name = band.get_file_name()
            toa_nir_scale_factor = float(band.scale_factor)

        elif band.product == "toa_refl" and band.name == "toa_band5":
            toa_swir1_name = band.get_file_name()
            toa_swir1_scale_factor = float(band.scale_factor)

        elif band.product == "toa_bt" and band.category == "image":
            # Get the output pixel size
            ls_info.x_pixel_size = band.pixel_size.x
            ls_info.y_pixel_size = band.pixel_size.y

            toa_bt_name = band.get_file_name()

            # Get the output proj4 string
            ls_info.dest_proj4 = get_proj4_projection_string(toa_bt_name)

    # Error if we didn't find the required TOA bands in the data
    if not toa_green_name:
        raise Exception("Failed to find the TOA GREEN band in the input data")
    if not toa_red_name:
        raise Exception("Failed to find the TOA RED band in the input data")
    if not toa_nir_name:
        raise Exception("Failed to find the TOA NIR band in the input data")
    if not toa_swir1_name:
        raise Exception("Failed to find the TOA SWIR1 band in the input data")
    if not toa_bt_name:
        raise Exception("Failed to find the TOA BT band in the input data")

    # Determine the bounding geographic coordinates for the ASTER tiles we
    # will need
    ls_info.north = math.ceil(gm.bounding_coordinates.north)
    ls_info.south = math.floor(gm.bounding_coordinates.south)
    ls_info.east = math.ceil(gm.bounding_coordinates.east)
    ls_info.west = math.floor(gm.bounding_coordinates.west)

    # Determine the UTM projection corner points
    for cp in gm.projection_information.corner_point:
        if cp.location == "UL":
            ls_info.min_x_extent = cp.x
            ls_info.max_y_extent = cp.y
        if cp.location == "LR":
            ls_info.max_x_extent = cp.x
            ls_info.min_y_extent = cp.y

    # Adjust the UTM coordinates for the image extents because they are
    # center-of-pixel coordinates, and we need to supply the warping with
    # the actual outer extents
    ls_info.min_x_extent = ls_info.min_x_extent - ls_info.x_pixel_size * 0.5
    ls_info.max_x_extent = ls_info.max_x_extent + ls_info.x_pixel_size * 0.5
    ls_info.min_y_extent = ls_info.min_y_extent - ls_info.y_pixel_size * 0.5
    ls_info.max_y_extent = ls_info.max_y_extent + ls_info.y_pixel_size * 0.5
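
    # Worked example of the half-pixel adjustment (illustrative numbers only,
    # not from real data): a 30 m pixel whose center-of-pixel UL corner is
    # x = 315015.0 has a true left edge of 315015.0 - 30.0 * 0.5 = 315000.0,
    # which is the outer extent the warping tools expect.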

    # Save for later
    satellite = gm.satellite

    del bands
    del gm
    del espa_xml

    return (
        ls_info,
        toa_bt_name,
        toa_green_name,
        toa_red_name,
        toa_nir_name,
        toa_swir1_name,
        toa_green_scale_factor,
        toa_red_scale_factor,
        toa_nir_scale_factor,
        toa_swir1_scale_factor,
        satellite,
    )
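

# --- Not part of the original example: a minimal usage sketch ---
# "LT05_example.xml" is a made-up file name; the unpacking below mirrors
# the return order of read_info_from_metadata defined above.
if __name__ == "__main__":
    (ls_info, toa_bt_name, toa_green_name, toa_red_name, toa_nir_name,
     toa_swir1_name, toa_green_scale_factor, toa_red_scale_factor,
     toa_nir_scale_factor, toa_swir1_scale_factor,
     satellite) = read_info_from_metadata("LT05_example.xml")
    print(satellite, ls_info.dest_proj4)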