Example #1
    def __init__(self, filter_configs=None, output_configs=None):

        self.filter_configs = utility.aggregate_configurations(
            filter_configs, TDVAR._local_def_3DVAR_filter_configs)
        self.output_configs = utility.aggregate_configurations(
            output_configs, TDVAR._local_def_3DVAR_output_configs)

        class OldStyle:
            pass

        if issubclass(OldStyle().__class__, object):
            # new-style (object-derived) class
            super().__init__(filter_configs=filter_configs,
                             output_configs=self.output_configs)
        else:
            # old-style class
            super(TDVAR, self).__init__(filter_configs=filter_configs,
                                        output_configs=self.output_configs)
        #
        self.model = self.filter_configs['model']
        #
        try:
            self._model_step_size = self.model._default_step_size
        except AttributeError:
            # the model does not expose a default step size; fall back to eps
            self._model_step_size = TDVAR.__time_eps
        self._time_eps = TDVAR.__time_eps

        #
        self.__initialized = True
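Note: every example in this collection funnels user options through utility.aggregate_configurations before use. Its implementation is not shown here; a minimal sketch of the assumed behavior (user-supplied values win, missing keys fall back to the defaults) could look like this:

def aggregate_configurations(configs, def_configs):
    """Merge user configs over defaults; user-supplied entries take precedence."""
    aggregated = dict(def_configs)   # start from a copy of the defaults
    if configs is not None:
        aggregated.update(configs)   # overwrite with user-supplied entries
    return aggregated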
Example #2
    def __init__(self, options_dict=None):
        #
        integration_configs = utility.aggregate_configurations(
            options_dict, FatODE_ERK_ADJ._def_options_dict)
        integration_configs = utility.aggregate_configurations(
            integration_configs, TimeIntegratorBase._def_options_dict)
        #
        # validate and set configurations of the adjoint integrator:
        integration_configs, status, info = self._validate_configs(
            integration_configs)
        self.integration_configs = integration_configs
        if status != 0:
            raise ValueError("Failed to configure the FATODE time integrator!")

        # Configurations are OK, we can proceed...
        self._verbose = self.integration_configs['verbose']

        TimeIntegratorBase.__init__(
            self,
            integration_configs)  # TODO: revisit when unification starts...

        #
        # ==================================================================
        # For testing, we are currently using FATODE-ERK here.
        # After testing, we shall decide where to put the adjoint (here vs.
        #   inside model wrappers/classes).
        #
        # Create a wrapper for the FATODE ERK-ADJOINT:
        # ==================================================================
        # -----------------<Online WRAPPER GENERATION>----------------------
        # ==================================================================
        try:
            # this import cannot be moved to module level; the compiled wrapper
            # may not exist until it is (re)generated below
            from erkadj import erk_adj_f90_integrator as integrate_adjoint
        except ImportError:
            print("Recreating the FATODE Adjoint Integrator...", end="")
            sys.stdout.flush()
            erkadj_setup.create_wrapper(verbose=self._verbose)
            from erkadj import erk_adj_f90_integrator as integrate_adjoint
            print("done...")
        self.__integrate_adjoint = integrate_adjoint
        # ==================================================================
        # ----------------------------<Done!>-------------------------------
        # ==================================================================

        #
        self._initialized = True
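The try/except ImportError block above is a lazy build-and-import idiom: try to import a compiled extension, and if it is missing, generate it and import again. A generic sketch of the same idiom (the builder callable is a placeholder, not part of the source):

import importlib

def import_or_build(module_name, builder):
    """Import a compiled extension, building it on first use if absent."""
    try:
        return importlib.import_module(module_name)
    except ImportError:
        builder()                          # e.g. run f2py to create the wrapper
        importlib.invalidate_caches()      # make the new module visible to import
        return importlib.import_module(module_name)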
Example #3
    def validate_filter_configs(filter_configs, def_filter_configs):
        """
        Aggregate the passed dictionaries with default configurations then make sure parameters are consistent.
        The first argument (filter_configs) is validated, updated with missing entries, and returned.
        
        Args:
            filter_configs: dict,
                A dictionary containing filter configurations. This should be the filter_configs dict 
                passed to the constructor.
                
            def_filter_configs: dict,
                A dictionary containing the default filter configurations. 

        Returns:
            filter_configs: dict,
                Same as the first argument (filter_configs) but validated, and updated with missing entries.
            
        """
        filter_configs = utility.aggregate_configurations(
            filter_configs, def_filter_configs)
        if filter_configs['filter_name'] is None:
            filter_configs['filter_name'] = 'Unknown_'

        # aggregate_configurations does not inspect the contents, so make sure
        # here that all parameters are consistent:
        if filter_configs['model'] is None:
            raise ValueError(
                "You have to pass a reference to the model object so that "
                "model observations can be created!")
        elif not isinstance(filter_configs['model'], ModelsBase):
            raise ValueError(
                "Passed model is not an instance of 'ModelsBase'! Passed: %s"
                % repr(filter_configs['model']))
        return filter_configs
Example #4
    def __init__(self, smoother_configs=None, output_configs=None):

        self.smoother_configs = utility.aggregate_configurations(smoother_configs, EnKS._local_def_EnKS_smoother_configs)
        self.output_configs = utility.aggregate_configurations(output_configs, EnKS._local_def_EnKS_output_configs)

        class OldStyle: pass
        if issubclass(OldStyle().__class__, object):
            # new-style (object-derived) class
            super().__init__(smoother_configs=smoother_configs, output_configs=self.output_configs)
        else:
            # old-style class
            super(EnKS, self).__init__(smoother_configs=smoother_configs, output_configs=self.output_configs)
        #
        self.model = self.smoother_configs['model']
        #
        try:
            self._model_step_size = self.model._default_step_size
        except AttributeError:
            self._model_step_size = EnKS.__time_eps
        self._time_eps = EnKS.__time_eps
        # print("self.__time_eps", self._time_eps)
        # print("self._model_step_size", self._model_step_size)

        # TODO: Get prior info

        # Prepare output directories:
        output_configs = self.output_configs
        file_output = output_configs['file_output']
        if file_output:
            output_dir = output_configs['file_output_dir']
            output_dirs = self._prepare_output_paths(output_dir=output_dir, cleanup_out_dir=True)
            self.output_configs.update({'file_output_dir': output_dirs[0],
                                        'smoother_statistics_dir': output_dirs[1],
                                        'model_states_dir': output_dirs[2],
                                        'observations_dir': output_dirs[3],
                                        'update_kernels_dir': output_dirs[4]
                                        })  # in case None was given

        #
        self.__initialized = True
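_prepare_output_paths is not shown in these examples. A minimal sketch of the assumed behavior, inferred from the keys used above (create the results tree and return the subdirectory paths in a fixed order):

import os
import shutil

def prepare_output_paths(output_dir, cleanup_out_dir=False):
    """Create the output tree; return [root, statistics, states, observations, kernels]."""
    if cleanup_out_dir and os.path.isdir(output_dir):
        shutil.rmtree(output_dir)  # start from a clean results tree
    sub_names = ['smoother_statistics', 'model_states', 'observations', 'update_kernels']
    paths = [output_dir] + [os.path.join(output_dir, name) for name in sub_names]
    for path in paths:
        os.makedirs(path, exist_ok=True)
    return paths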
Example #5
    def __init__(self, filter_configs=None, output_configs=None):

        filter_configs = utility.aggregate_configurations(
            filter_configs, KF._local_def_kf_filter_configs)
        output_configs = utility.aggregate_configurations(
            output_configs, KF._local_def_kf_output_configs)

        class OldStyle:
            pass

        if issubclass(OldStyle().__class__, object):
            # new-style (object-derived) class
            super().__init__(filter_configs=filter_configs,
                             output_configs=output_configs)
        else:
            # old-style class
            super(KF, self).__init__(filter_configs=filter_configs,
                                     output_configs=output_configs)
        #
        self.model = self.filter_configs['model']
        #
        self.__initialized = True
Example #6
    def validate_output_configs(output_configs, def_output_configs):
        """
        
        Aggregate the passed dictionaries with default configurations then make sure parameters are consistent.
        The first argument (output_configs) is validated, updated with missing entries, and returned.
        
        Args:
            output_configs: dict,
                A dictionary containing output configurations. This should be the output_configs dict 
                passed to the constructor.
                
            def_output_configs: dict,
                A dictionary containing the default output configurations. 

        Returns:
            output_configs: dict,
                Same as the first argument (output_configs) but validated, and updated with missing entries.
            
        """
        output_configs = utility.aggregate_configurations(
            output_configs, def_output_configs)
        # Validating file output
        if output_configs['file_output']:
            if output_configs['file_output_dir'] is None:
                dates_root_dir = os.getenv('DATES_ROOT_PATH')
                directory_name = '_filter_results_'
                tmp_dir = os.path.join(dates_root_dir, directory_name)
                # print("Output directory of the filter is not set. Results are saved in: '%s'" % tmp_dir)
                output_configs['file_output_dir'] = tmp_dir
            else:
                dates_root_dir = os.getenv('DATES_ROOT_PATH')
                if not output_configs['file_output_dir'].startswith(dates_root_dir):
                    output_configs['file_output_dir'] = os.path.join(
                        dates_root_dir, output_configs['file_output_dir'])

            if output_configs['file_output_variables'] is None:
                output_configs['file_output'] = False
            else:
                for var in output_configs['file_output_variables']:
                    if var not in def_output_configs['file_output_variables']:
                        raise ValueError(
                            "Unrecognized output variable to be saved! "
                            "Received: %s" % var)
        else:
            output_configs['file_output_dir'] = None
            output_configs['file_output_variables'] = None

        return output_configs
Example #7
    def __init__(self, filter_configs=None, output_configs=None):

        # aggregate configurations, and attach filter_configs, output_configs to the filter object.
        filter_configs = utility.aggregate_configurations(
            filter_configs, PF._def_local_filter_configs)
        output_configs = utility.aggregate_configurations(
            output_configs, PF._def_local_output_configs)

        #
        class OldStyle:
            pass

        if issubclass(OldStyle().__class__, object):
            # new-style (object-derived) class
            super().__init__(filter_configs=filter_configs,
                             output_configs=output_configs)
        else:
            # old-style class
            super(PF, self).__init__(filter_configs=filter_configs,
                                     output_configs=output_configs)

        #
        # the following configurations are filter-specific.
        # validate the ensemble size:
        if self.filter_configs['ensemble_size'] is None:
            try:
                forecast_ensemble_size = len(
                    self.filter_configs['forecast_ensemble'])
            except (ValueError, AttributeError, TypeError):
                forecast_ensemble_size = 0
            try:
                analysis_ensemble_size = len(
                    self.filter_configs['analysis_ensemble'])
            except (ValueError, AttributeError, TypeError):
                analysis_ensemble_size = 0

            self.sample_size = max(forecast_ensemble_size,
                                   analysis_ensemble_size)
            #
        else:
            self.sample_size = self.filter_configs['ensemble_size']

        # retrieve the observation vector size from model:
        self.observation_size = self.filter_configs[
            'model'].observation_vector_size()

        if self.filter_configs['localize_weights']:
            print(
                "Weight Localization in Particle Filter is not yet implemented."
            )
            raise NotImplementedError

        #
        #
        try:
            self._verbose = self.output_configs['verbose']
        except (AttributeError, NameError):
            self._verbose = False

        # the following configurations are also filter-specific.
        likelihood_function = self.filter_configs['likelihood_function'].lower().strip()
        resampling_scheme = self.filter_configs['resampling_scheme']
        #
        if resampling_scheme is not None:
            # normalize the scheme name, then validate it
            resampling_scheme = resampling_scheme.lower().strip()
            if resampling_scheme not in ['systematic', 'stratified']:
                raise ValueError("Resampling scheme '%s' is not supported!" %
                                 resampling_scheme)
            self._resample = True
        else:
            self._resample = False

        # Initialized successfully;
        self.__initialized = True
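The constructor accepts 'systematic' and 'stratified' resampling schemes, but their implementations are not part of this snippet. A minimal sketch of systematic resampling over normalized particle weights (an illustration of the standard algorithm, not code from the source):

import numpy as np

def systematic_resample(weights):
    """Return particle indices drawn by systematic resampling."""
    n = len(weights)
    positions = (np.random.random() + np.arange(n)) / n  # one random offset, evenly spaced points
    cumulative = np.cumsum(weights)
    cumulative[-1] = 1.0  # guard against floating-point round-off
    return np.searchsorted(cumulative, positions)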
Example #8
    def validate_output_configs(output_configs, def_output_configs):
        """
        Aggregate the passed dictionaries with default configurations then make sure parameters are consistent.
        The first argument (output_configs) is validated, updated with missing entries, and returned.

        Args:
            output_configs: dict,
                A dictionary containing output configurations. This should be the output_configs dict
                passed to the constructor.

            def_output_configs: dict,
                A dictionary containing the default output configurations.

        Returns:
            output_configs: dict,
                Same as the first argument (output_configs) but validated, and updated with missing entries.

        """
        output_configs = utility.aggregate_configurations(
            output_configs, def_output_configs)
        # screen output
        if output_configs['scr_output']:
            # screen output is turned on; the iteration spacing must be a positive integer
            scr_iter = output_configs['scr_output_iter']
            if scr_iter is not None:
                if isinstance(scr_iter, float):
                    scr_iter = int(scr_iter)  # note: np.int was removed from NumPy
                if not isinstance(scr_iter, int) or scr_iter <= 0:
                    scr_iter = 1  # fall back to printing every iteration
                output_configs['scr_output_iter'] = scr_iter
        else:
            output_configs['scr_output_iter'] = np.inf  # just in case

        # file output
        if output_configs['file_output']:
            file_iter = output_configs['file_output_iter']
            if file_iter is not None:
                if isinstance(file_iter, float):
                    file_iter = int(file_iter)
                if not isinstance(file_iter, int) or file_iter <= 0:
                    file_iter = 1  # fall back to writing every iteration
                output_configs['file_output_iter'] = file_iter
            #
            if output_configs['file_output_dir'] is None:
                # relative to the DATeS root directory, of course
                output_configs['file_output_dir'] = 'Results/Filtering_Results'
            # Create the full path of the output directory ( if only relative dir is passed)
            dates_root_dir = os.getenv('DATES_ROOT_PATH')
            if not output_configs['file_output_dir'].startswith(dates_root_dir):
                output_configs['file_output_dir'] = os.path.join(
                    dates_root_dir, output_configs['file_output_dir'])
            output_configs['file_output_dir'] = output_configs[
                'file_output_dir'].rstrip('/ ')

            if output_configs['file_output_variables'] is None:
                output_configs['file_output'] = False
            else:
                for var in output_configs['file_output_variables']:
                    if var not in def_output_configs['file_output_variables']:
                        raise ValueError(
                            "Unrecognized output variable to be saved! "
                            "Received: %s" % var)
        else:
            output_configs['file_output_iter'] = np.inf  # just in case
            output_configs['file_output_dir'] = None
            output_configs['file_output_variables'] = None

        return output_configs
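Both validators anchor relative output paths under the DATES_ROOT_PATH environment variable. A standalone sketch of that normalization, with a fallback added for an unset variable (the fallback is an assumption; the original code assumes the variable is set):

import os

def anchor_under_root(path, default='Results'):
    """Return an output path anchored under the DATeS root directory."""
    root = os.getenv('DATES_ROOT_PATH') or os.getcwd()  # fallback is an assumption
    if path is None:
        path = default
    if not path.startswith(root):
        path = os.path.join(root, path)
    return path.rstrip('/ ')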
Example #9
    def validate_assimilation_configs(assimilation_configs,
                                      def_assimilation_configs):
        """
        Aggregate the passed dictionaries with default configurations then make sure parameters are consistent.
        The first argument (assimilation_configs) is validated, updated with missing entries, and returned.

        Args:
            assimilation_configs: dict,
                A dictionary containing assimilation configurations. This should be the assimilation_configs dict
                passed to the constructor.

            def_assimilation_configs: dict,
                A dictionary containing the default assimilation configurations.

        Returns:
            assimilation_configs: dict,
                Same as the first argument (assimilation_configs) but validated, and updated with missing entries.

        """
        assimilation_configs = utility.aggregate_configurations(
            assimilation_configs, def_assimilation_configs)
        # Since aggregate never cares about the contents, we need to make sure now all parameters are consistent
        for key in assimilation_configs:
            if key.lower() not in def_assimilation_configs:
                print(
                    "Caution: unknown key detected: '%s'. It is ignored, and "
                    "defaults are restored if necessary." % key)
        if assimilation_configs['filter'] is None:
            raise ValueError(
                "You have to create a filter object and attach it here so that "
                "I can use it for sequential DA!")
        elif not isinstance(assimilation_configs['filter'], FiltersBase):
            raise ValueError(
                "Passed filter is not an instance of 'FiltersBase'! Passed: %s"
                % repr(assimilation_configs['filter']))

        try:
            model = assimilation_configs['filter'].filter_configs['model']
        except (AttributeError, KeyError, NameError):
            try:
                model = assimilation_configs['filter'].model
            except (AttributeError, KeyError, NameError):
                raise AttributeError(
                    "Could not retrieve a reference to a valid model object "
                    "from the passed filter object!")
        #
        if not isinstance(model, ModelsBase):
            raise ValueError(
                "The model retrieved from the passed filter object is not an "
                "instance of 'ModelsBase'! Passed: %s" % repr(model))

        # check the reference initial condition and the reference initial time
        if (assimilation_configs['ref_initial_condition'] is None
                or assimilation_configs['ref_initial_time'] is None):
            print("You didn't pass a reference initial state and a reference initial time.")
            print("This indicates that you will provide a list of observations to use as real data.")
            print("Please call filtering_process.recursive_assimilation_process() with the following arguments:")
            print("observations, obs_checkpoints, da_checkpoints")
            # raise ValueError("Both the reference initial condition and the initial time must be passed!")

        # Check for observation and assimilation checkpoints and update synchronous accordingly
        # if assimilation_configs['da_checkpoints'] is not None and assimilation_configs['obs_checkpoints'] is not None:
        if assimilation_configs['obs_checkpoints'] is not None:
            # observation checkpoints is built in full.
            if isinstance(assimilation_configs['obs_checkpoints'], (int, float)):
                assimilation_configs['obs_checkpoints'] = [
                    assimilation_configs['obs_checkpoints']
                ]
            try:
                obs_checkpoints = np.asarray(
                    assimilation_configs['obs_checkpoints'])
                assimilation_configs['obs_checkpoints'] = obs_checkpoints
                num_observations = np.size(obs_checkpoints)
            except (TypeError, ValueError):
                raise ValueError(
                    "Couldn't cast the observation checkpoints into np.ndarray. "
                    "This mostly means you didn't pass an iterable! "
                    "Passed: %s" %
                    str(assimilation_configs['obs_checkpoints']))

            # Now check the assimilation checkpoints
            if assimilation_configs['da_checkpoints'] is not None:
                if isinstance(assimilation_configs['da_checkpoints'], (int, float)):
                    assimilation_configs['da_checkpoints'] = [
                        assimilation_configs['da_checkpoints']
                    ]
                try:
                    da_checkpoints = np.asarray(
                        assimilation_configs['da_checkpoints'])
                    assimilation_configs['da_checkpoints'] = da_checkpoints
                    num_assimilation_cycles = np.size(da_checkpoints)
                except (TypeError, ValueError):
                    raise ValueError(
                        "Couldn't cast the assimilation checkpoints into np.ndarray. "
                        "This mostly means you didn't pass an iterable! "
                        "Passed: %s" %
                        repr(assimilation_configs['da_checkpoints']))

                if num_assimilation_cycles != num_observations:
                    raise ValueError(
                        "Number of observations and number of assimilation cycles must match!\n"
                        "Number of assimilation cycles passed = %d\n"
                        "Number of observation time points = %d" %
                        (num_assimilation_cycles, num_observations))
                else:
                    # All good; now decide whether the assimilation is synchronous,
                    # i.e. whether observation and assimilation times coincide
                    test_bool = (assimilation_configs['obs_checkpoints'] !=
                                 assimilation_configs['da_checkpoints'])
                    if isinstance(test_bool, (list, np.ndarray)):
                        assimilation_configs['synchronous'] = not np.asarray(test_bool).any()
                    elif isinstance(test_bool, bool):
                        # single entries; all inputs are converted to arrays above
                        assimilation_configs['synchronous'] = not test_bool
                    else:
                        raise AssertionError("Unexpected comparison results!")
            else:
                assimilation_configs['da_checkpoints'] = assimilation_configs[
                    'obs_checkpoints']
                assimilation_configs['synchronous'] = True  # No matter what!

            assimilation_configs['da_time_spacing'] = None
            if assimilation_configs['ref_initial_time'] > np.min(
                    assimilation_configs['da_checkpoints']):
                raise ValueError(
                    "Some observation times or assimilation times are set before "
                    "the time of the reference initial condition!")
            elif assimilation_configs[
                    'ref_initial_time'] == assimilation_configs[
                        'da_checkpoints'][0]:
                assimilation_configs['num_filtering_cycles'] = np.size(
                    assimilation_configs['da_checkpoints']) - 1
            else:
                assimilation_configs['num_filtering_cycles'] = np.size(
                    assimilation_configs['da_checkpoints'])

        #
        else:
            # No valid checkpoints are passed.
            # All are created based on the given spacing and assume filtering is synchronous
            if assimilation_configs[
                    'num_filtering_cycles'] is None or assimilation_configs[
                        'da_time_spacing'] is None:
                raise ValueError(
                    "da_checkpoints and obs_checkpoints are not provided. "
                    "The alternatives, da_time_spacing and num_filtering_cycles, "
                    "are not provided either! The filtering process needs one "
                    "of the two alternatives!")
            else:
                ref_initial_time = assimilation_configs['ref_initial_time']
                da_time_spacing = assimilation_configs['da_time_spacing']
                num_da_cycles = assimilation_configs['num_filtering_cycles']
                eps = 1e-16
                assimilation_configs['obs_checkpoints'] = np.arange(
                    ref_initial_time,
                    ref_initial_time + da_time_spacing * num_da_cycles + eps,
                    da_time_spacing)
                assimilation_configs['da_checkpoints'] = assimilation_configs[
                    'obs_checkpoints']
                assimilation_configs['synchronous'] = True

        return assimilation_configs
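A small usage illustration of the synchronicity logic above: the process is synchronous exactly when every observation time coincides with an assimilation time (values here are made up for illustration):

import numpy as np

# observations every 0.1 time units, assimilation at the same instants:
obs_checkpoints = np.arange(0.0, 1.0 + 1e-16, 0.1)
da_checkpoints = obs_checkpoints
synchronous = not (obs_checkpoints != da_checkpoints).any()  # -> True

# assimilate at shifted times instead; the process becomes asynchronous:
da_checkpoints = obs_checkpoints + 0.05
synchronous = not (obs_checkpoints != da_checkpoints).any()  # -> False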