def partial_vectors(dset, estimator_config_key):
    """Call all partials specified in the configuration and set up the corresponding state vector

    The list of partials to calculate is taken from the config file of the given technique. Each partial calculator is
    passed a :class:`~where.data.dataset.Dataset` with data for the modelrun and should return a tuple with the partial
    vectors and their names.

    Args:
        dset (Dataset):              A Dataset containing model run data.
        estimator_config_key (String): Key in config file with the name of the estimator.

    Returns:
        Dict: List of names of the partial derivatives for each partial config key.
    """
    partial_vectors = dict()
    prefix = dset.vars["pipeline"]

    # Delete values from previous iterations so the partial fields are rebuilt from scratch
    if "partial" in dset.fields:
        del dset.partial

    # One entry per partial config key (e.g. separate keys for different parameter groups)
    for config_key in estimators.partial_config_keys(estimator_config_key):
        partial_vectors[config_key] = list()
        partials = config.tech[config_key].list
        # Each plugin returns (data, names, data_unit) for one parameter
        partial_data = plugins.call_all(package_name=__name__, plugins=partials, prefix=prefix, dset=dset)

        for param, (data, names, data_unit) in partial_data.items():
            param_unit_cfg = config.tech[param].unit
            # A parameter without a configured unit is a configuration error; log.fatal
            # presumably aborts the analysis here — TODO confirm
            if not param_unit_cfg.str:
                log.fatal(
                    f"No unit given for parameter {param!r} in {param_unit_cfg.source}"
                )

            # Fall back to the parameter unit when no explicit display unit is configured
            display_unit = config.tech[param].display_unit.str
            display_unit = param_unit_cfg.str if not display_unit else display_unit

            # The partial derivative's unit is <unit of the calc field> / <unit of the parameter>
            partial_unit_str = f"{dset.unit('calc')[0]} / ({param_unit_cfg.str})"
            partial_unit = str(Unit(partial_unit_str).u)
            # NOTE(review): Unit(from_unit, to_unit) appears to yield a conversion factor
            # from data_unit to partial_unit — confirm against the Unit API
            factor = Unit(data_unit, partial_unit)

            # One dataset field per column of the partial matrix; `name` may be empty,
            # in which case the field is named after the parameter alone
            for values, name in zip(data.T, names):
                partial_name = f"{param}-{name}" if name else f"{param}"
                partial_vectors[config_key].append(partial_name)

                field_name = f"partial.{partial_name}"
                dset.add_float(field_name, val=values * factor, unit=partial_unit, write_level="operational")
                dset.meta.add(partial_name, display_unit, section="display_units")

    return partial_vectors
def apply_postprocessors(config_key, dset):
    """Apply postprocessors for a given session

    The list of postprocessors to run is read from the configuration under the given key.

    Args:
        config_key (String): The configuration key listing which postprocessors to apply.
        dset (Dataset):      Dataset containing analysis data.

    Returns:
        Dict: Return values of the postprocessor plugins, keyed by plugin name.
    """
    prefix = dset.vars["pipeline"]
    postprocessors = config.tech[config_key].list
    # Plain string: no placeholders, so the f-prefix was superfluous (ruff F541)
    log.info("Applying postprocessors")
    return plugins.call_all(package_name=__name__, plugins=postprocessors, prefix=prefix, dset=dset)
def get(dset, param_names):
    """Call all constraint plugins and stack their results

    Each constraint plugin returns a tuple whose first element contributes rows to the
    constraint matrix ``h`` and whose second element contributes to the ``sigma`` vector.
    The individual contributions are concatenated in plugin order.

    Args:
        dset (Dataset): Model run data.
        param_names:    Names of parameters to estimate.

    Returns:
        Tuple: Constraint matrix ``h`` and corresponding ``sigma`` values, each a
               numpy array concatenated over all constraint plugins.
    """
    constraints = config.tech["estimate_constraints"].list
    # TODO(review): prefix is hard-coded to "todo" — should presumably be the pipeline
    # prefix as in the other plugin runners; confirm before changing behavior.
    constraints = plugins.call_all(
        package_name=__name__, plugins=constraints, prefix="todo", dset=dset, param_names=param_names
    )
    h = np.concatenate([c[0] for c in constraints.values()])
    sigma = np.concatenate([c[1] for c in constraints.values()])
    # Removed leftover debugging code (import IPython; IPython.embed()) that would
    # drop every run into an interactive shell before returning.
    return h, sigma
def apply_removers(config_key: str, dset: "Dataset") -> None:
    """Apply all removers for a given session

    Each remover plugin returns a boolean keep-index over the observations; the dataset is
    reduced in place to the observations kept by every remover.

    Args:
        config_key: The configuration key listing which removers to apply.
        dset:       Dataset containing analysis data.
    """
    prefix = dset.vars["pipeline"]
    removers = config.tech[config_key].list
    # Plain string: no placeholders, so the f-prefix was superfluous (ruff F541)
    log.info("Applying removers")
    keep_idxs = plugins.call_all(package_name=__name__, plugins=removers, prefix=prefix, dset=dset)

    all_keep_idx = np.ones(dset.num_obs, dtype=bool)
    for remover, remover_keep_idx in keep_idxs.items():
        # ~ is the idiomatic elementwise NOT for boolean arrays (was np.logical_not)
        log.info(f"Removing {np.sum(~remover_keep_idx):5d} observations based on {remover}")
        all_keep_idx = np.logical_and(all_keep_idx, remover_keep_idx)

    log.info(f"Keeping {np.sum(all_keep_idx)} of {dset.num_obs} observations")
    dset.subset(all_keep_idx)

    # An empty dataset cannot be analyzed further; abort the run
    if dset.num_obs == 0:
        log.fatal("No observations are available.")
def apply_outlier_detectors(config_key: str, dset: "Dataset") -> np.ndarray:
    """Apply all outlier detectors for a given session

    Unlike :func:`apply_removers`, the dataset is NOT modified here; the combined
    keep-index is returned for the caller to act on.

    Args:
        config_key: The configuration key listing which detectors to apply.
        dset:       Dataset containing analysis data.

    Returns:
        Boolean array over the observations: True for observations to keep,
        False for detected outliers.
    """
    prefix = dset.vars["pipeline"]
    detectors = config.tech[config_key].list
    # Plain string: no placeholders, so the f-prefix was superfluous (ruff F541)
    log.info("Apply outlier detectors")
    keep_idxs = plugins.call_all(package_name=__name__, plugins=detectors, prefix=prefix, dset=dset)

    all_keep_idx = np.ones(dset.num_obs, dtype=bool)
    for detector, detector_keep_idx in keep_idxs.items():
        log.info(f"Detecting {np.sum(~detector_keep_idx):5d} outliers based on {detector}")
        all_keep_idx = np.logical_and(all_keep_idx, detector_keep_idx)

    log.info(f"Removing {np.sum(~all_keep_idx)} of {dset.num_obs} observations")
    return all_keep_idx
def test_all_plugins(plugin_package):
    """Verify that call_all invokes every plugin in the package."""
    call_results = plugins.call_all(plugin_package)
    assert isinstance(call_results, dict)
    assert len(call_results) > 1
def calculate(config_key, dset):
    """Run all calculator plugins listed under the given configuration key

    Args:
        config_key (String): The configuration key listing which calculators to run.
        dset (Dataset):      Dataset containing analysis data.

    Returns:
        Dict: Return values of the calculator plugins, keyed by plugin name.
    """
    calculators = config.tech[config_key].list
    pipeline_prefix = dset.vars["pipeline"]
    return plugins.call_all(package_name=__name__, plugins=calculators, prefix=pipeline_prefix, dset=dset)