# Example no. 1 (score: 0)
    def setUp(self):
        '''Run AUX extraction for the validation scene before each test.'''

        scene_xml = 'LT50420342011119PAC01.xml'
        self.input_xml = os.path.join(self.validation_path, scene_xml)

        # The XML metadata file defines which AUX data gets processed
        self.processor = AuxNARRGribProcessor(self.input_xml)
        # Extract the associated AUX data up front
        self.processor.extract_aux_data()
    def setUp(self):
        '''Prepare the processor and extract the AUX data.'''

        # Scene metadata lives alongside the validation data
        self.input_xml = os.path.join(
            self.validation_path, 'LT50420342011119PAC01.xml')

        # Build the processor from the XML metadata, then run extraction
        processor = AuxNARRGribProcessor(self.input_xml)
        self.processor = processor
        self.processor.extract_aux_data()
# Example no. 3 (score: 0)
class AuxNARRGribProcessor_TestCase(LSRD_ValidationFramework):
    '''Tests for Grib file processing.'''

    def __init__(self, *args, **kwargs):
        self.name = 'AuxNARRGribProcessor_TestCase'
        super(AuxNARRGribProcessor_TestCase, self).__init__(*args, **kwargs)

        # Validation data is presumed available only if its directory exists
        self.validation_path = os.path.join(self.lsrd_validation_dir,
                                            self.name)
        if not os.path.isdir(self.validation_path):
            raise Exception('Missing validation data for [{0}]'.format(
                self.name))

        # Directories the grib processor is expected to produce
        self.directories = ['HGT_1', 'HGT_2', 'SPFH_1',
                            'SPFH_2', 'TMP_1', 'TMP_2']

    def setUp(self):
        '''Run AUX extraction against the validation scene.'''

        scene_xml = 'LT50420342011119PAC01.xml'
        self.input_xml = os.path.join(self.validation_path, scene_xml)

        # The XML metadata drives what the processor extracts
        self.processor = AuxNARRGribProcessor(self.input_xml)
        self.processor.extract_aux_data()

    def tearDown(self):
        '''Remove any output directories created by the processor.'''

        for directory in self.directories:
            if os.path.isdir(directory):
                shutil.rmtree(directory)

    def test_process_grib_data(self):
        '''Test the processing of grib files from our internal archive.'''

        for directory in self.directories:
            self.assertEqual(True, os.path.isdir(directory))

            validation_directory = os.path.join(self.validation_path,
                                                directory)

            # Union of local and validation filenames (names only, sorted)
            found = glob.glob(os.path.join(directory, '*'))
            found += glob.glob(os.path.join(validation_directory, '*'))
            names = sorted({os.path.basename(item) for item in found})

            # Every filename must match between local and validation copies
            for name in names:
                self.assertFilesEqual(
                    os.path.join(validation_directory, name),
                    os.path.join(directory, name))
class AuxNARRGribProcessor_TestCase(LSRD_ValidationFramework):
    '''Tests for Grib file processing.'''

    def __init__(self, *args, **kwargs):
        self.name = 'AuxNARRGribProcessor_TestCase'
        super(AuxNARRGribProcessor_TestCase, self).__init__(*args, **kwargs)

        # A missing validation directory means the tests cannot run at all
        self.validation_path = os.path.join(self.lsrd_validation_dir,
                                            self.name)
        if not os.path.isdir(self.validation_path):
            raise Exception('Missing validation data for [{0}]'
                            .format(self.name))

        # Output directories produced by the grib processor
        self.directories = [
            'HGT_1', 'HGT_2',
            'SPFH_1', 'SPFH_2',
            'TMP_1', 'TMP_2',
        ]

    def setUp(self):
        '''Extract the AUX data needed by the tests.'''

        self.input_xml = os.path.join(self.validation_path,
                                      'LT50420342011119PAC01.xml')

        # The XML metadata file defines the data to process
        self.processor = AuxNARRGribProcessor(self.input_xml)
        self.processor.extract_aux_data()

    def tearDown(self):
        '''Delete produced directories, skipping any never created.'''

        for produced in self.directories:
            if os.path.isdir(produced):
                shutil.rmtree(produced)

    def test_process_grib_data(self):
        '''Test the processing of grib files from our internal archive.'''

        for directory in self.directories:
            self.assertEqual(True, os.path.isdir(directory))

            validation_directory = os.path.join(self.validation_path,
                                                directory)

            # Collect the unique basenames from both locations
            candidates = set()
            for source_dir in (directory, validation_directory):
                for path in glob.glob(os.path.join(source_dir, '*')):
                    candidates.add(os.path.basename(path))

            # Compare each local file against its validation counterpart
            for filename in sorted(candidates):
                local_file = os.path.join(directory, filename)
                validation_file = os.path.join(validation_directory, filename)
                self.assertFilesEqual(validation_file, local_file)
def generate_lst(xml_filename,
                 only_extract_aux_data=False,
                 keep_lst_temp_data=False,
                 keep_intermediate_data=False,
                 debug=False):
    '''
    Description:
        Provides the glue code for generating LST products.

    Args:
        xml_filename: ESPA XML metadata file defining the data to process.
        only_extract_aux_data: Stop after extracting the LST AUX data.
        keep_lst_temp_data: Keep the 'lst_temp' products in the metadata.
        keep_intermediate_data: Keep intermediate files and directories.
        debug: Pass the debug flag to the intermediate-data executable.

    Raises:
        Exception: Propagated from any failing processing stage.
    '''

    # Get the logger
    logger = logging.getLogger(__name__)

    # Retrieval and initial processing of the required AUX data
    try:
        logger.info('Extracting LST AUX data')
        current_processor = AuxNARRGribProcessor(xml_filename)
        current_processor.extract_aux_data()
    except Exception:
        logger.error('Failed processing auxillary NARR data')
        raise

    if only_extract_aux_data:
        logger.info('Stopping - User requested to stop after extracting'
                    ' LST AUX data')
        return

    # Generate the thermal, upwelled, and downwelled radiance bands as well
    # as the atmospheric transmittance band
    cmd = ['lst_intermediate_data',
           '--xml', xml_filename,
           '--verbose']
    if debug:
        cmd.append('--debug')

    cmd = ' '.join(cmd)
    output = ''
    try:
        logger.info('Calling [{0}]'.format(cmd))
        output = util.System.execute_cmd(cmd)
    except Exception:
        logger.error('Failed creating intermediate data')
        raise
    finally:
        # Always surface tool output, even when the command failed
        if len(output) > 0:
            logger.info(output)

    # Generate Estimated Landsat Emissivity band
    try:
        current_processor = (
            estimate_landsat_emissivity.EstimateLandsatEmissivity(
                xml_filename, keep_intermediate_data))
        current_processor.generate_product()
    except Exception:
        logger.error('Failed creating Estimated Landsat Emissivity data')
        raise

    # Generate Land Surface Temperature band
    try:
        current_processor = build_lst_data.BuildLSTData(xml_filename)
        current_processor.generate_data()
    except Exception:
        logger.error('Failed processing Land Surface Temperature')
        raise

    # Cleanup
    if not keep_intermediate_data:

        # Remove the grib extraction directories
        for grib_dir in ('HGT_1', 'HGT_2', 'SPFH_1', 'SPFH_2',
                         'TMP_1', 'TMP_2'):
            shutil.rmtree(grib_dir, ignore_errors=True)

        # Remove the point directories generated during the core processing
        point_filename = 'point_list.txt'
        with open(point_filename, 'r') as point_list_fd:
            remove_dirs = {line.strip() for line in point_list_fd}

        for dirname in remove_dirs:
            shutil.rmtree(dirname, ignore_errors=False)

        # Finally remove the file
        os.unlink(point_filename)

    if not keep_lst_temp_data:
        util.Metadata.remove_products(xml_filename, ['lst_temp'])
def generate_lst(xml_filename,
                 only_extract_aux_data=False,
                 keep_lst_temp_data=False,
                 keep_intermediate_data=False,
                 debug=False):
    '''
    Description:
        Provides the glue code for generating LST products.

    Args:
        xml_filename: ESPA XML metadata file defining the data to process.
        only_extract_aux_data: Stop after extracting the LST AUX data.
        keep_lst_temp_data: Keep the 'lst_temp' products in the metadata.
        keep_intermediate_data: Keep intermediate files and directories.
        debug: Pass the debug flag to the intermediate-data executable.

    Raises:
        Exception: Propagated from any failing processing stage.
    '''

    # Get the logger
    logger = logging.getLogger(__name__)

    # ------------------------------------------------------------------------
    # Retrieval and initial processing of the required AUX data
    try:
        logger.info('Extracting LST AUX data')
        current_processor = AuxNARRGribProcessor(xml_filename)
        current_processor.extract_aux_data()
    except Exception:
        logger.error('Failed processing auxillary NARR data')
        raise

    if only_extract_aux_data:
        logger.info('Stopping - User requested to stop after extracting'
                    ' LST AUX data')
        return

    # Extract the input ID from the xml filename and build some other
    # filenames
    input_id = os.path.splitext(xml_filename)[0]
    # ESPA creates the DEM for us
    dem_filename = '{0}_dem.img'.format(input_id)

    # ------------------------------------------------------------------------
    # Generate the thermal, upwelled, and downwelled radiance bands as well as
    # the atmospheric transmittance band
    cmd = ['l5_7_intermediate_data',
           '--xml', xml_filename,
           '--dem', dem_filename,
           '--verbose']
    if debug:
        cmd.append('--debug')

    cmd = ' '.join(cmd)
    output = ''
    try:
        logger.info('Calling [{0}]'.format(cmd))
        output = util.System.execute_cmd(cmd)
    except Exception:
        logger.error('Failed creating intermediate data')
        raise
    finally:
        # Log any tool output regardless of success or failure
        if len(output) > 0:
            logger.info(output)

    # ------------------------------------------------------------------------
    # Generate Estimated Landsat Emissivity band
    try:
        current_processor = (
            estimate_landsat_emissivity.EstimateLandsatEmissivity(
                xml_filename, keep_intermediate_data))
        current_processor.generate_product()
    except Exception:
        logger.error('Failed creating Estimated Landsat Emissivity data')
        raise

    # ------------------------------------------------------------------------
    # Generate Land Surface Temperature band
    try:
        current_processor = build_lst_data.BuildLSTData(xml_filename)
        current_processor.generate_data()
    except Exception:
        logger.error('Failed processing Land Surface Temperature')
        raise

    # ------------------------------------------------------------------------
    # Cleanup
    if not keep_intermediate_data:

        # Remove the grib extraction directories
        for grib_dir in ('HGT_1', 'HGT_2', 'SPFH_1', 'SPFH_2',
                         'TMP_1', 'TMP_2'):
            shutil.rmtree(grib_dir, ignore_errors=True)

        # Remove the point directories generated during the core processing;
        # each line of the point list names one directory to delete
        point_filename = 'point_list.txt'
        with open(point_filename, 'r') as point_list_fd:
            remove_dirs = {line.strip() for line in point_list_fd}

        for dirname in remove_dirs:
            shutil.rmtree(dirname, ignore_errors=False)

        # Finally remove the file
        os.unlink(point_filename)

    if not keep_lst_temp_data:
        util.Metadata.remove_products(xml_filename, ['lst_temp'])