Example #1
0
def test_mode():
    """
    Tests the maximum/optimum for priors in different scales...

    For normal/laplace/logNormal priors the numerical optimum found by
    Nelder-Mead must match the expected mode; for the uniform prior the
    negative log-density is checked inside and outside its support.
    """

    scale_names = ['lin', 'log', 'log10']

    # per-scale box bounds and expected optimum location (in that scale)
    bounds_and_optimum = {
        'lin': {'lb': [0], 'ub': [10], 'opt': [1]},
        'log': {'lb': [-3], 'ub': [3], 'opt': [0]},
        'log10': {'lb': [-3], 'ub': [2], 'opt': [0]},
    }

    for prior_type, scale in itertools.product(
            ['normal', 'laplace', 'logNormal'], scale_names):

        test_prior = NegLogParameterPriors(
            [get_parameter_prior_dict(0, prior_type, [1, 1], scale)])

        test_problem = pypesto.Problem(
            test_prior,
            lb=bounds_and_optimum[scale]['lb'],
            ub=bounds_and_optimum[scale]['ub'],
            dim_full=1,
            x_scales=[scale],
        )

        result = pypesto.optimize.minimize(
            problem=test_problem,
            optimizer=pypesto.optimize.ScipyOptimizer(method='Nelder-Mead'),
            n_starts=10,
        )

        # best start must land on the expected optimum
        assert np.isclose(
            result.optimize_result.list[0]['x'],
            bounds_and_optimum[scale]['opt'],
            atol=1e-04,
        )

    # test uniform distribution:
    for scale in scale_names:
        neg_log_density = get_parameter_prior_dict(
            0, 'uniform', [1, 2], scale)['density_fun']

        # check inside and outside of the support interval [1, 2]
        assert abs(neg_log_density(lin_to_scaled(.5, scale)) - 0) < 1e-8
        assert abs(neg_log_density(lin_to_scaled(1.5, scale))
                   - math.log(1)) < 1e-8
        assert abs(neg_log_density(lin_to_scaled(2.5, scale)) - 0) < 1e-8
Example #2
0
def test_derivatives(prior_type_list, scale):
    """
    Tests the finite gradients and second order derivatives.

    Builds one prior per entry of ``prior_type_list`` and checks the
    analytic derivatives against finite differences, in both function
    and (if available) residual mode.
    """

    priors = []
    for index, prior_type in enumerate(prior_type_list):
        # uniform-type priors take an interval, all others location/scale
        if prior_type in ['uniform', 'parameterScaleUniform']:
            parameters = [-1, 1]
        else:
            parameters = [1, 1]
        priors.append(
            get_parameter_prior_dict(index, prior_type, parameters, scale))

    test_prior = NegLogParameterPriors(priors)

    # use this x0, since it is a moderate value both in linear
    # and in log scale...
    x0 = np.array([lin_to_scaled(0.5, scale) for _ in priors])

    eps_list = [1e-3]
    assert test_prior.check_gradients_match_finite_differences(
        x=x0, mode=MODE_FUN, multi_eps=eps_list)
    assert test_prior.check_gradients_match_finite_differences(
        x=x0, mode=MODE_FUN, order=1, multi_eps=eps_list)

    if test_prior.has_res:
        test_prior.check_gradients_match_finite_differences(
            x=x0, mode=MODE_RES, multi_eps=eps_list)
Example #3
0
def test_derivatives():
    """
    Tests the finite gradients and second order derivatives.

    For every combination of prior type and scale, the analytic first
    and second derivatives are compared against finite differences.
    """

    for prior_type, scale in itertools.product(
            ['uniform', 'normal', 'laplace', 'logNormal'],
            ['lin', 'log', 'log10']):

        # uniform takes an interval, the others location/scale parameters
        parameters = [-1, 1] if prior_type == 'uniform' else [1, 1]

        prior_dict = get_parameter_prior_dict(
            0, prior_type, parameters, scale)

        # use this x0, since it is a moderate value both in linear
        # and in log scale...
        x0 = np.array([0.5])

        grad_error = opt.check_grad(
            prior_dict['density_fun'], prior_dict['density_dx'], x0)
        hess_error = opt.check_grad(
            prior_dict['density_dx'], prior_dict['density_ddx'], x0)

        assert grad_error < 1e-3
        assert hess_error < 1e-3
Example #4
0
def _test_evaluate_prior(struct):
    """
    Evaluate an objective aggregated with a standard-normal prior.

    Calls the aggregated objective at ``struct['x']`` for every
    combination of sensitivity orders supported by each mode, simply
    checking that evaluation does not raise.
    """
    x = struct['x']
    prior = NegLogParameterPriors(
        [get_parameter_prior_dict(0, 'normal', [0, 1], 'lin')])
    obj = pypesto.objective.AggregatedObjective([struct['obj'], prior])
    # residual mode supports sensi order <= 1, function mode <= 2
    for mode, max_sensi_order in ((MODE_RES, 1), (MODE_FUN, 2)):
        orders = range(max_sensi_order + 1)
        # NOTE(review): combinations are taken for sizes 0..len(orders)-1,
        # so the full order set itself is never requested — confirm intended
        for size in range(len(orders)):
            for sensi_order in itt.combinations(orders, size):
                obj(x, sensi_order, mode=mode)
Example #5
0
def test_mode(scale, prior_type):
    """
    Tests the maximum/optimum for priors in different scales...

    Non-flat priors are checked via the optimizer result; uniform-type
    priors are flat, so their density is evaluated inside and outside
    the support instead.
    """

    # per-scale box bounds and expected optimum location (in that scale)
    bounds = {'lin': {'lb': [0], 'ub': [10], 'opt': [1]},
              'log': {'lb': [-3], 'ub': [3], 'opt': [0]},
              'log10': {'lb': [-3], 'ub': [2], 'opt': [0]}}

    prior_dict = get_parameter_prior_dict(0, prior_type, [1, 1], scale)

    test_prior = NegLogParameterPriors([prior_dict])
    test_problem = pypesto.Problem(test_prior,
                                   lb=bounds[scale]['lb'],
                                   ub=bounds[scale]['ub'],
                                   dim_full=1,
                                   x_scales=[scale])

    # parameterScale* priors live directly on the parameter scale, so
    # the expected optimum / evaluation points below are taken in 'lin'
    if prior_type.startswith('parameterScale'):
        scale = 'lin'

    result = pypesto.optimize.minimize(
        problem=test_problem,
        optimizer=pypesto.optimize.ScipyOptimizer(method='Nelder-Mead'),
        n_starts=10)

    if prior_type in ['uniform', 'parameterScaleUniform']:
        # flat functions don't have local minima, so dont check this for
        # uniform priors; check inside and outside of interval instead
        density = prior_dict['density_fun']
        assert np.isclose(density(lin_to_scaled(.5, scale)), 0)
        assert np.isclose(density(lin_to_scaled(1.5, scale)), math.log(1))
        assert np.isclose(density(lin_to_scaled(2.5, scale)), 0)
    else:
        assert np.isclose(result.optimize_result.list[0]['x'],
                          bounds[scale]['opt'], atol=1e-04)
Example #6
0
def test_derivatives(prior_type, scale):
    """
    Tests the finite gradients and second order derivatives.

    Compares the analytic first and second derivatives of the prior's
    negative log-density against finite differences.
    """

    # uniform-type priors take an interval, the others location/scale
    parameters = ([-1, 1]
                  if prior_type in ['uniform', 'parameterScaleUniform']
                  else [1, 1])

    prior_dict = get_parameter_prior_dict(
        0, prior_type, parameters, scale)

    # use this x0, since it is a moderate value both in linear
    # and in log scale...
    x0 = np.array([0.5])

    grad_error = opt.check_grad(prior_dict['density_fun'],
                                prior_dict['density_dx'], x0)
    hess_error = opt.check_grad(prior_dict['density_dx'],
                                prior_dict['density_ddx'], x0)

    assert grad_error < 1e-3
    assert hess_error < 1e-3
Example #7
0
def test_mode(scale, prior_type_list):
    """
    Tests the maximum/optimum for priors in different scales...

    One prior is created per entry of ``prior_type_list``: uniform-type
    priors get the support interval [1, 2], all others the parameters
    [1, 1].  Uniform (flat) priors are checked pointwise (density and
    residual inside/outside the support); all other priors are checked by
    optimizing the aggregated problem and comparing against the expected
    optimum, in both the log-density and the residual representation.
    """

    # per-scale scalar bounds and expected optimum (stated in that scale)
    problem_dict = {
        'lin': {
            'lb': 0,
            'ub': 3,
            'opt': 1
        },
        'log': {
            'lb': -3,
            'ub': 3,
            'opt': 0
        },
        'log10': {
            'lb': -3,
            'ub': 2,
            'opt': 0
        },
    }

    # one prior per requested type; uniform-type priors use the interval
    # [1, 2], all others the parameters [1, 1]
    prior_list = [
        get_parameter_prior_dict(
            iprior,
            prior_type,
            [1, 2]
            if prior_type in ['uniform', 'parameterScaleUniform'] else [1, 1],
            scale,
        ) for iprior, prior_type in enumerate(prior_type_list)
    ]
    ubs = np.asarray([problem_dict[scale]['ub'] for _ in prior_type_list])
    lbs = np.asarray([problem_dict[scale]['lb'] for _ in prior_type_list])

    test_prior = NegLogParameterPriors(prior_list)
    test_problem = pypesto.Problem(
        test_prior,
        lb=lbs,
        ub=ubs,
        dim_full=len(prior_type_list),
        x_scales=[scale for _ in prior_type_list],
    )

    # expected optimum per parameter; NaN marks flat (uniform) priors that
    # have no unique optimum and are excluded from the optimizer check
    topt = []
    # test uniform distribution:
    for prior_type, prior in zip(prior_type_list, prior_list):
        # NOTE(review): this reassignment of `scale` persists for all
        # subsequent loop iterations and for the `topt` lookups below —
        # confirm that is intended when mixing parameterScale* priors
        # with other types in one list
        if prior_type.startswith('parameterScale'):
            scale = 'lin'
        if prior_type in ['uniform', 'parameterScaleUniform']:
            # check inside and outside of interval: the neg-log-density is
            # infinite outside [1, 2] and log(1) = 0 inside
            funprior = prior['density_fun']
            assert np.isinf(funprior(lin_to_scaled(0.5, scale)))
            assert np.isclose(funprior(lin_to_scaled(1.5, scale)), math.log(1))
            assert np.isinf(funprior(lin_to_scaled(2.5, scale)))
            # the residual representation mirrors that: inf outside, 0 inside
            resprior = prior['residual']
            assert np.isinf(resprior(lin_to_scaled(0.5, scale)))
            assert np.isclose(resprior(lin_to_scaled(1.5, scale)), 0)
            assert np.isinf(resprior(lin_to_scaled(2.5, scale)))
            topt.append(np.nan)
        else:
            topt.append(problem_dict[scale]['opt'])

        # with a logNormal prior in the list, the aggregated prior must not
        # report residual / residual-sensitivity support
        if prior_type.endswith('logNormal'):
            assert not test_prior.has_res
            assert not test_prior.has_sres

    topt = np.asarray(topt)

    # test log-density based and residual representation
    # (only if at least one prior has a unique optimum)
    if any(~np.isnan(topt)):
        for method in ['L-BFGS-B', 'ls_trf']:
            # 'ls_trf' is residual-based, so skip it when no residual
            # representation is available
            if method == 'ls_trf' and not test_prior.has_res:
                continue
            optimizer = pypesto.optimize.ScipyOptimizer(method=method)
            startpoints = pypesto.startpoint.UniformStartpoints(
                check_fval=True, )
            result = pypesto.optimize.minimize(
                problem=test_problem,
                optimizer=optimizer,
                n_starts=10,
                startpoint_method=startpoints,
                filename=None,
            )

            # flat functions don't have local minima, so dont check this
            # for uniform priors

            num_optim = result.optimize_result.list[0]['x'][~np.isnan(topt)]
            assert np.isclose(num_optim, topt[~np.isnan(topt)],
                              atol=1e-03).all()