Example #1
def optimize_task(initial_parameter, scheme, verbose):
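    # Build the penalty problem for the scheme, minimize it with lmfit's
    # least_squares method, and wrap the optimized parameters, result
    # datasets and fit statistics in a Result.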

    problem_bag, groups = _create_problem_bag(scheme)

    minimizer = lmfit.Minimizer(calculate_penalty,
                                initial_parameter,
                                fcn_args=[scheme, problem_bag, groups],
                                fcn_kws=None,
                                iter_cb=None,
                                scale_covar=True,
                                nan_policy='omit',
                                reduce_fcn=None)
    verbose = 2 if verbose else 0
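    # method='least_squares' runs scipy.optimize.least_squares under the hood;
    # its verbose flag is an integer (0, 1 or 2), hence the conversion above.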
    lm_result = minimizer.minimize(method='least_squares',
                                   verbose=verbose,
                                   max_nfev=scheme.nfev)

    parameter = ParameterGroup.from_parameter_dict(lm_result.params)
    datasets = _create_result(scheme, parameter)
    covar = lm_result.covar if hasattr(lm_result, 'covar') else None

    return Result(scheme, datasets, parameter, lm_result.nfev,
                  lm_result.nvarys, lm_result.ndata, lm_result.nfree,
                  lm_result.chisqr, lm_result.redchi, lm_result.var_names,
                  covar)
Example #2
    def _calculate_penalty(self, parameter):
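        # lmfit hands the objective function plain lmfit.Parameters; convert
        # them back to a ParameterGroup before building the penalty job.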

        if not isinstance(parameter, ParameterGroup):
            parameter = ParameterGroup.from_parameter_dict(parameter)

        job = self._create_calculate_penalty_job(parameter)

        return job.compute()
Example #3
def calculate_penalty(parameter, scheme, bag, groups):
    parameter = ParameterGroup.from_parameter_dict(parameter)
    residual_function = residual_nnls if scheme.nnls else residual_variable_projection
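    # Four cases below: grouped vs. ungrouped data, and index-dependent vs.
    # index-independent matrices. Each branch builds the matrices and the
    # matching residual graph for the chosen residual function.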
    if scheme.model.grouped():
        if scheme.model.index_dependend():
            _, _, constraint_labels_and_matrices = \
                create_index_dependend_grouped_matrix_jobs(
                    scheme, bag, parameter
                )
            _, _, _, penalty = \
                residual_calculation.create_index_dependend_grouped_residual(
                    scheme, parameter, bag, constraint_labels_and_matrices, residual_function
                )
        else:

            _, _, constraint_labels_and_matrices = \
                calculate_index_independend_grouped_matrices(scheme, groups, parameter)

            _, _, _, penalty = \
                residual_calculation.create_index_independend_grouped_residual(
                    scheme, parameter, bag, constraint_labels_and_matrices, residual_function
                )
    else:
        if scheme.model.index_dependend():
            _, _, constraint_labels_and_matrices = \
                create_index_dependend_ungrouped_matrix_jobs(
                    scheme, bag, parameter
                )
            _, _, _, penalty = \
                residual_calculation.create_index_dependend_ungrouped_residual(
                    scheme, parameter, bag, constraint_labels_and_matrices, residual_function
                )
        else:

            _, _, constraint_labels_and_matrices = \
                calculate_index_independend_ungrouped_matrices(scheme, parameter)

            _, _, _, penalty = \
                residual_calculation.create_index_independend_ungrouped_residual(
                    scheme, parameter, bag, constraint_labels_and_matrices, residual_function
                )
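    # The branches above only assemble a lazy task graph; compute() below
    # materializes it into the actual penalty vector.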
    penalty = penalty.compute()
    return penalty
Example #4
def test_non_negative():
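    # Round-trip the parameters through as_parameter_dict()/from_parameter_dict()
    # and check that non-negativity, values and bounds survive the conversion.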

    params = """
    - ["neg", -1]
    - ["negmax", -1, {max=0}]
    - ["nonneg1", 1, {non-negative: True}]
    - ["nonneg2", 2, {non-negative: True}]
    - ["nonnegmin", 6, {non-negative: True, min: 2}]
    """
    params = ParameterGroup.from_yaml(params)
    result = ParameterGroup.from_parameter_dict(params.as_parameter_dict())
    print(params)
    params.as_parameter_dict().pretty_print()
    print(result)

    for label, p in params.all():
        print(label)
        r = result.get(label)
        assert r.non_neg == p.non_neg
        assert np.allclose(r.value, p.value)
        assert np.allclose(r.min, p.min)
        assert np.allclose(r.max, p.max)
Example #5
    def optimize(self, verbose=True):
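        # Same flow as the module-level optimize_task above: wrap
        # self._calculate_penalty in an lmfit.Minimizer, run the
        # least_squares method and collect everything into a Result.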
        parameter = self._scheme.parameter.as_parameter_dict()
        minimizer = lmfit.Minimizer(
            self._calculate_penalty,
            parameter,
            fcn_args=None,
            fcn_kws=None,
            iter_cb=None,
            scale_covar=True,
            nan_policy='omit',
            reduce_fcn=None)
        verbose = 2 if verbose else 0
        lm_result = minimizer.minimize(
            method='least_squares', verbose=verbose, max_nfev=self._scheme.nfev)

        self._optimal_parameter = ParameterGroup.from_parameter_dict(lm_result.params)
        self._calculate_result()

        covar = lm_result.covar if hasattr(lm_result, 'covar') else None

        return Result(self._scheme, self._result_data, self._optimal_parameter,
                      lm_result.nfev, lm_result.nvarys, lm_result.ndata, lm_result.nfree,
                      lm_result.chisqr, lm_result.redchi, lm_result.var_names, covar)
Example #6
    def optimized_parameter(self) -> ParameterGroup:
        """The optimized parameters."""
        if self._lm_result is None:
            return self.initial_parameter
        return ParameterGroup.from_parameter_dict(self._lm_result.params)
Example #7
def calculate_residual(parameter: typing.Union[ParameterGroup,
                                               lmfit.Parameters],
                       result: 'glotaran.analysis.Result') -> np.ndarray:
    """Calculates the residual and fills the global analysis result with data.

    Parameters
    ----------
    parameter :
        The parameter for optimization.
    result :
        The global analysis result.
    """

    if not isinstance(parameter, ParameterGroup):
        parameter = ParameterGroup.from_parameter_dict(parameter)

    penalty = []
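    # Collect one residual vector per problem group; they are concatenated
    # into the full penalty at the end.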
    for index, item in result.groups.items():
        clp_labels, matrix = calculate_group_item(item, result.model,
                                                  parameter, result.data)

        for i, row in enumerate(matrix.T):
            # check each column (one per clp) so the error names the offending label
            if not np.isfinite(row).all():
                raise Exception(f"Matrix is not finite at clp {clp_labels[i]}"
                                f"\n\nCurrent Parameter:\n\n{parameter}")

        clp = None
        residual = None
        if result.nnls:
            clp, residual = residual_nnls(matrix, result.data_groups[index])
        else:
            clp, residual = residual_variable_projection(
                matrix, result.data_groups[index])

        result.global_clp[index] = xr.DataArray(clp,
                                                coords=[('clp_label',
                                                         clp_labels)])

        start = 0
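        # Write each dataset's slice of the residual back into the result data,
        # indexed along the model's global dimension, and store the clp values.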
        for i, dataset in item:
            dataset = result._data[dataset.label]
            if 'residual' not in dataset:
                dataset['residual'] = dataset.data.copy()
            end = dataset.coords[result.model.matrix_dimension].size + start
            dataset.residual.loc[{
                result.model.global_dimension: i
            }] = residual[start:end]
            start = end

            if 'clp' not in dataset:
                dim1 = dataset.coords[result.model.global_dimension].size
                dim2 = dataset.coords['clp_label'].size
                dataset['clp'] = ((result.model.global_dimension, 'clp_label'),
                                  np.zeros((dim1, dim2), dtype=np.float64))
            dataset.clp.loc[{result.model.global_dimension: i}] = \
                np.array([clp[clp_labels.index(label)] if label in clp_labels else None
                          for label in dataset.coords['clp_label'].values])

        if callable(result.model._additional_penalty_function):
            additionals = result.model._additional_penalty_function(
                parameter, clp_labels, clp, matrix, parameter)
            residual = np.concatenate((residual, additionals))

        penalty.append(residual)

    return np.concatenate(penalty)