    def yaml_check(self, resample_type):
        '''
        Check crossmul specifics from YAML
        '''
        error_channel = journal.error('CrossmulRunConfig.yaml_check')

        scratch_path = self.cfg['product_path_group']['scratch_path']
        # if coregistered_slc_path not provided, use scratch_path as source for coregistered SLCs
        if 'coregistered_slc_path' not in self.cfg['processing']['crossmul']:
            self.cfg['processing']['crossmul'][
                'coregistered_slc_path'] = scratch_path

        # check whether coregistered_slc_path is a directory or file
        coregistered_slc_path = self.cfg['processing']['crossmul'][
            'coregistered_slc_path']
        if not os.path.isdir(coregistered_slc_path) and not os.path.isfile(
                coregistered_slc_path):
            err_str = f"{coregistered_slc_path} is invalid; needs to be a file or directory."
            error_channel.log(err_str)
            raise ValueError(err_str)

        # check if required coregistered frequency/polarization rasters exist in directory or HDF5 file
        # Distinguish between coarse and fine resample_slc directories
        freq_pols = self.cfg['processing']['input_subset'][
            'list_of_frequencies']
        frequencies = freq_pols.keys()
        if os.path.isdir(coregistered_slc_path):
            if resample_type not in ['coarse', 'fine']:
                err_str = f"{resample_type} not a valid resample slc type"
                error_channel.log(err_str)
                raise ValueError(err_str)
            helpers.check_mode_directory_tree(coregistered_slc_path,
                                              f'{resample_type}_resample_slc',
                                              frequencies, freq_pols)
        else:
            helpers.check_hdf5_freq_pols(coregistered_slc_path, freq_pols)

        # flatten == False disables flattening in crossmul
        # flatten == True runs flattening with scratch_path as the data directory
        # flatten as a str is taken as the path to the data directory
        # Data directory contains range offset rasters
        # The following directory tree is required:
        # flatten
        # └── geo2rdr
        #     └── freq(A,B)
        #         └── range.off
        # flatten defaults to bool True
        flatten = self.cfg['processing']['crossmul']['flatten']
        if flatten:
            # if flatten is a bool (i.e. True here), replace it with scratch_path (str)
            if isinstance(flatten, bool):
                self.cfg['processing']['crossmul']['flatten'] = scratch_path
                flatten = scratch_path
            # check if required frequency range offsets exist
            helpers.check_mode_directory_tree(flatten, 'geo2rdr', frequencies)
        else:
            self.cfg['processing']['crossmul']['flatten'] = None

        if 'oversample' not in self.cfg['processing']['crossmul']:
            self.cfg['processing']['crossmul']['oversample'] = 2
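
For reference, here is a minimal sketch of the runconfig subtree the crossmul check reads, with the defaults it fills in. The key names come from the code above; the path and frequency/polarization values are placeholders.

# Hypothetical runconfig subtree after yaml_check has applied its defaults:
# coregistered_slc_path and flatten fall back to scratch_path, and
# oversample defaults to 2 when omitted.
cfg = {
    'product_path_group': {'scratch_path': '/tmp/scratch'},
    'processing': {
        'input_subset': {
            'list_of_frequencies': {'A': ['HH', 'HV']},
        },
        'crossmul': {
            'coregistered_slc_path': '/tmp/scratch',  # defaulted from scratch_path
            'flatten': '/tmp/scratch',                # flatten=True -> scratch_path
            'oversample': 2,                          # defaulted when missing
        },
    },
}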
Example #2
    def yaml_check(self, resample_type):
        '''
        Check resample specifics from YAML.
        '''
        error_channel = journal.error('ResampleSlcRunConfig.yaml_check')

        # Extract frequency
        freq_pols = self.cfg['processing']['input_subset'][
            'list_of_frequencies']
        frequencies = freq_pols.keys()

        if resample_type not in ['coarse', 'fine']:
            err_str = f"{resample_type} is not a valid resample mode"
            error_channel.log(err_str)
            raise ValueError(err_str)

        # For insar.py, offsets_dir comes from the previous step of the
        # workflow through scratch_path
        resample_key = f'{resample_type}_resample'
        if self.cfg['processing'][resample_key]['offsets_dir'] is None:
            self.cfg['processing'][resample_key]['offsets_dir'] = \
                self.cfg['product_path_group']['scratch_path']
        offsets_dir = self.cfg['processing'][resample_key]['offsets_dir']

        # Check directory structure and existence of offset files depending on
        # the selected resample type
        if resample_type == 'coarse':
            helpers.check_mode_directory_tree(offsets_dir, 'geo2rdr',
                                              frequencies)
            for freq in frequencies:
                rg_off = os.path.join(offsets_dir, 'geo2rdr', f'freq{freq}',
                                      'range.off')
                az_off = rg_off.replace('range', 'azimuth')
                if not os.path.exists(rg_off) or not os.path.exists(az_off):
                    err_str = f'{rg_off} and {az_off} offset files do not exist'
                    error_channel.log(err_str)
                    raise FileNotFoundError(err_str)
        else:
            # use the HH or VV rubbersheeted offsets to fine-resample
            # the secondary SLC. Check that the offsets exist
            for freq in frequencies:
                for pol in set.intersection(set(['HH', 'VV']),
                                            set(freq_pols[freq])):
                    rg_off = os.path.join(offsets_dir, 'rubbersheet_offsets',
                                          f'freq{freq}', pol, 'range.off.vrt')
                    az_off = rg_off.replace('range', 'azimuth')
                    if not os.path.exists(rg_off) or not os.path.exists(
                            az_off):
                        err_str = f"{rg_off} and {az_off} files do not exists. HH and" \
                                  f"VV rubbersheet offsets required to run fine resampling"
                        error_channel.log(err_str)
                        raise FileNotFoundError(err_str)
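
To make the two branches above concrete, here is a small self-contained sketch (not part of the workflow code) that enumerates the offset rasters each mode looks for; the directory layout is taken from the path construction above.

import os

def expected_offset_paths(offsets_dir, resample_type, freq_pols):
    # Illustrative stand-in only: offset rasters expected for 'coarse'
    # (geo2rdr offsets) and 'fine' (HH/VV rubbersheeted offsets) resampling.
    paths = []
    for freq, pols in freq_pols.items():
        if resample_type == 'coarse':
            base = os.path.join(offsets_dir, 'geo2rdr', f'freq{freq}')
            paths += [os.path.join(base, 'range.off'),
                      os.path.join(base, 'azimuth.off')]
        else:
            for pol in set(pols) & {'HH', 'VV'}:
                base = os.path.join(offsets_dir, 'rubbersheet_offsets',
                                    f'freq{freq}', pol)
                paths += [os.path.join(base, 'range.off.vrt'),
                          os.path.join(base, 'azimuth.off.vrt')]
    return paths

# expected_offset_paths('scratch', 'coarse', {'A': ['HH', 'HV']})
# -> ['scratch/geo2rdr/freqA/range.off', 'scratch/geo2rdr/freqA/azimuth.off']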
Example #3
    def yaml_check(self):
        '''
        Check geo2rdr specifics from YAML.
        '''
        # Use scratch as topo_path if none given in YAML
        if 'topo_path' not in self.cfg['processing']['geo2rdr']:
            self.cfg['processing']['geo2rdr']['topo_path'] = self.cfg[
                'product_path_group']['scratch_path']

        # Check topo directory structure
        topo_path = self.cfg['processing']['geo2rdr']['topo_path']
        freq_pols = self.cfg['processing']['input_subset'][
            'list_of_frequencies']
        frequencies = freq_pols.keys()
        helpers.check_mode_directory_tree(topo_path, 'rdr2geo', frequencies)
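
The helpers call above verifies the topo products laid out by rdr2geo. Below is a simplified, hypothetical stand-in for that check; the real nisar.workflows.helpers implementation may verify more (e.g. individual raster files).

import os

def check_rdr2geo_tree(topo_path, frequencies):
    # Simplified stand-in: require <topo_path>/rdr2geo/freq<X> for each
    # requested frequency.
    for freq in frequencies:
        freq_dir = os.path.join(topo_path, 'rdr2geo', f'freq{freq}')
        if not os.path.isdir(freq_dir):
            raise FileNotFoundError(f'{freq_dir} not found')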
Example #4
    def yaml_check(self):
        '''
        Check dense offset specifics from YAML file
        '''

        error_channel = journal.error('DenseOffsetsRunConfig.yaml_check')
        scratch_path = self.cfg['product_path_group']['scratch_path']

        # If coregistered_slc_path is None, assume that we run dense_offsets
        # as part of insar.py. In this case, coregistered_slc_path comes
        # from the previous processing step via scratch_path
        if self.cfg['processing']['dense_offsets'][
                'coregistered_slc_path'] is None:
            self.cfg['processing']['dense_offsets'][
                'coregistered_slc_path'] = scratch_path

        # Check that coregistered_slc_path exists (file or directory)
        coregistered_slc_path = self.cfg['processing']['dense_offsets'][
            'coregistered_slc_path']
        if not os.path.exists(coregistered_slc_path):
            err_str = f"{coregistered_slc_path} invalid; must be a file or directory"
            error_channel.log(err_str)
            raise ValueError(err_str)

        # Check that the geometry-coregistered rasters exist in the
        # directory or HDF5 file
        freq_pols = self.cfg['processing']['input_subset'][
            'list_of_frequencies']
        frequencies = freq_pols.keys()

        if os.path.isdir(coregistered_slc_path):
            helpers.check_mode_directory_tree(coregistered_slc_path,
                                              'coarse_resample_slc',
                                              frequencies, freq_pols)
        else:
            helpers.check_hdf5_freq_pols(coregistered_slc_path, freq_pols)
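
When coregistered_slc_path is a directory, the tree check above covers every requested frequency and polarization. The sketch below lists the directories that implies; it is an illustration only, and the per-polarization subdirectories are inferred from the freq_pols argument rather than taken from the helpers code.

import os

def coarse_resample_slc_dirs(coreg_slc_path, freq_pols):
    # Illustration only: directories implied by the 'coarse_resample_slc'
    # tree check for a given frequency/polarization selection.
    return [os.path.join(coreg_slc_path, 'coarse_resample_slc',
                         f'freq{freq}', pol)
            for freq, pols in freq_pols.items() for pol in pols]

# coarse_resample_slc_dirs('scratch', {'A': ['HH', 'HV']})
# -> ['scratch/coarse_resample_slc/freqA/HH',
#     'scratch/coarse_resample_slc/freqA/HV']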
Example #5
    def yaml_check(self):
        '''
        Check rubbersheet specifics from YAML
        '''

        error_channel = journal.error('RubbersheetRunConfig.yaml_check')
        scratch_path = self.cfg['product_path_group']['scratch_path']

        # If rubbersheet is enabled but dense_offsets is not, throw an error
        # and do not run rubbersheet
        if not self.cfg['processing']['dense_offsets']['enabled'] and \
                self.cfg['processing']['rubbersheet']['enabled']:
            err_str = 'Dense_offsets must be enabled to run rubbersheet'
            error_channel.log(err_str)
            raise RuntimeError(err_str)

        # Check that offset filtering options are properly set. The schema
        # throws an error if the filter type is not valid.
        filter_type = self.cfg['processing']['rubbersheet']['offsets_filter']

        if filter_type == 'median':
            if 'filter_size_range' not in self.cfg['processing']['rubbersheet']['median']:
                self.cfg['processing']['rubbersheet']['median'][
                    'filter_size_range'] = 5
            if 'filter_size_azimuth' not in \
                    self.cfg['processing']['rubbersheet']['median']:
                self.cfg['processing']['rubbersheet']['median'][
                    'filter_size_azimuth'] = 5
        elif filter_type == 'gaussian':
            if 'sigma_range' not in self.cfg['processing']['rubbersheet']['gaussian']:
                self.cfg['processing']['rubbersheet']['gaussian']['sigma_range'] = 1
            if 'sigma_azimuth' not in self.cfg['processing']['rubbersheet']['gaussian']:
                self.cfg['processing']['rubbersheet']['gaussian']['sigma_azimuth'] = 1

        # If dense_offsets_path is None, assume that we run rubbersheet
        # as part of insar.py. In this case, dense_offsets_path comes from
        # the previous step (dense_offsets) via scratch_path
        if self.cfg['processing']['rubbersheet']['dense_offsets_path'] is None:
            self.cfg['processing']['rubbersheet'][
                'dense_offsets_path'] = scratch_path

        # If geo2rdr_offsets path is None, assume it is scratch_path (see above)
        if self.cfg['processing']['rubbersheet']['geo2rdr_offsets_path'] is None:
            self.cfg['processing']['rubbersheet'][
                'geo2rdr_offsets_path'] = scratch_path

        dense_offsets_path = self.cfg['processing']['rubbersheet'][
            'dense_offsets_path']
        geo2rdr_offsets_path = self.cfg['processing']['rubbersheet'][
            'geo2rdr_offsets_path']
        freq_pols = self.cfg['processing']['input_subset'][
            'list_of_frequencies']
        frequencies = freq_pols.keys()

        # Check if dense_offsets_path is a directory.
        # If yes, check it has dense offsets estimated for
        # the required frequencies and polarizations
        if os.path.isdir(dense_offsets_path):
            helpers.check_mode_directory_tree(dense_offsets_path,
                                              'dense_offsets',
                                              frequencies, freq_pols)
        else:
            # If not a directory, throw an error
            err_str = f"{dense_offsets_path} is invalid; needs to be a directory"
            error_channel.log(err_str)
            raise ValueError(err_str)

        # Check if geo2rdr offset path has appropriate structure
        if os.path.isdir(geo2rdr_offsets_path):
            helpers.check_mode_directory_tree(geo2rdr_offsets_path,
                                              'geo2rdr', frequencies)
        else:
            # If not a directory, throw an error
            err_str = f"{geo2rdr_offsets_path} is invalid; needs to be a directory"
            error_channel.log(err_str)
            raise ValueError(err_str)
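
For reference, a minimal sketch of the rubbersheet subtree after the defaults above have been applied; key names follow the code, the path values are placeholders, and only the block matching offsets_filter is actually filled in by yaml_check.

# Hypothetical rubbersheet settings after yaml_check defaults:
# median filter sizes default to 5, gaussian sigmas default to 1.
rubbersheet_cfg = {
    'enabled': True,
    'offsets_filter': 'median',
    'dense_offsets_path': '/tmp/scratch',    # defaulted from scratch_path
    'geo2rdr_offsets_path': '/tmp/scratch',  # defaulted from scratch_path
    'median': {'filter_size_range': 5, 'filter_size_azimuth': 5},
    'gaussian': {'sigma_range': 1, 'sigma_azimuth': 1},
}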