Example #1
    def test_parallel_objective(self):
        # check that a parallel objective works without issue
        # (parallel evaluation could conceivably fail at a higher level,
        #  e.g. in emcee or in scipy.optimize.differential_evolution)
        model = self.model361
        model.threads = 2

        objective = Objective(
            model,
            (self.qvals361, self.rvals361, self.evals361),
            transform=Transform("logY"),
        )
        p0 = np.array(objective.varying_parameters())
        cov = objective.covar()
        walkers = np.random.multivariate_normal(np.atleast_1d(p0),
                                                np.atleast_2d(cov),
                                                size=(100))
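        # serial reference: evaluate logl/chisqr for each walker with builtin map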
        map_logl = np.array(list(map(objective.logl, walkers)))
        map_chi2 = np.array(list(map(objective.chisqr, walkers)))

        wf = Wrapper_fn2(model.model, p0)
        map_mod = np.array(list(map(wf, walkers)))

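        # a 2-process parallel map should reproduce the serial results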
        with MapWrapper(2) as g:
            mapw_mod = g(wf, walkers)
            mapw_logl = g(objective.logl, walkers)
            mapw_chi2 = g(objective.chisqr, walkers)
        assert_allclose(mapw_logl, map_logl)
        assert_allclose(mapw_chi2, map_chi2)
        assert_allclose(mapw_mod, map_mod)
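
The test above relies on fixtures defined elsewhere in the refnx test suite (model361, Wrapper_fn2, MapWrapper). The following is a minimal standalone sketch of the same serial-versus-parallel check, using multiprocessing.Pool in place of the test's map wrapper and an assumed Gaussian log-likelihood; the names and model are illustrative only.

    import numpy as np
    from multiprocessing import Pool
    from numpy.testing import assert_allclose

    rng = np.random.default_rng(1)
    _data = rng.normal(loc=2.0, scale=0.5, size=200)

    def logl(theta):
        # assumed Gaussian log-likelihood in (mu, log_sigma), illustration only
        mu, log_sigma = theta
        sigma = np.exp(log_sigma)
        return -0.5 * np.sum(((_data - mu) / sigma) ** 2
                             + 2 * log_sigma + np.log(2 * np.pi))

    if __name__ == "__main__":
        walkers = rng.normal([2.0, np.log(0.5)], 0.05, size=(100, 2))
        serial = np.array(list(map(logl, walkers)))
        with Pool(2) as pool:
            # the parallel map must reproduce the serial results
            parallel = np.array(pool.map(logl, walkers))
        assert_allclose(parallel, serial)
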
Example #2
    def test_multidimensionality(self):
        # Check that ND data can be used with an objective/model/data
        # (or at least it doesn't stand in the way)
        rng = np.random.default_rng()
        x = rng.uniform(size=100).reshape(50, 2)

        desired = line_ND(x, self.p)
        assert desired.shape == (50, 2)
        data = Data1D((x, desired))

        model = Model(self.p, fitfunc=line_ND)
        y = model(x)
        assert_allclose(y, desired)

        objective = Objective(model, data)
        assert_allclose(objective.chisqr(), 0)
        assert_allclose(objective.generative(), desired)
        assert_allclose(objective.residuals(), 0)
        assert objective.residuals().shape == (50, 2)

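        # these should evaluate without raising for ND residuals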
        objective.logl()
        objective.logpost()
        covar = objective.covar()
        assert covar.shape == (2, 2)
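
line_ND and self.p are defined elsewhere in the test module. A plausible sketch of such a fit function, assuming two parameters (intercept and gradient) broadcast elementwise so the output keeps the shape of the ND input:

    import numpy as np

    def line_ND(x, params):
        # hypothetical ND straight line: y = c + m * x, broadcast over any
        # input shape, so y.shape == x.shape
        c, m = np.array(params, dtype=float)[:2]
        return c + m * np.asarray(x)

    x = np.random.default_rng().uniform(size=100).reshape(50, 2)
    y = line_ND(x, [0.5, 2.0])
    assert y.shape == (50, 2)

With two varying parameters the objective's covariance matrix comes out (2, 2), as asserted above.
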
Example #3
    def test_covar(self):
        # checks objective.covar against optimize.least_squares covariance.
        path = os.path.dirname(os.path.abspath(__file__))

        theoretical = np.loadtxt(os.path.join(path, 'gauss_data.txt'))
        xvals, yvals, evals = np.hsplit(theoretical, 3)
        xvals = xvals.flatten()
        yvals = yvals.flatten()
        evals = evals.flatten()

        p0 = np.array([0.1, 20., 0.1, 0.1])
        names = ['bkg', 'A', 'x0', 'width']
        bounds = [(-1, 1), (0, 30), (-5., 5.), (0.001, 2)]

        params = Parameters(name="gauss_params")
        for p, name, bound in zip(p0, names, bounds):
            param = Parameter(p, name=name)
            param.range(*bound)
            param.vary = True
            params.append(param)

        model = Model(params, fitfunc=gauss)
        data = Data1D((xvals, yvals, evals))
        objective = Objective(model, data)

        # first calculate least_squares jac/hess/covariance matrices
        res = least_squares(objective.residuals,
                            np.array(params),
                            jac='3-point')

        hess_least_squares = np.matmul(res.jac.T, res.jac)
        covar_least_squares = np.linalg.inv(hess_least_squares)

        # now calculate corresponding matrices by hand, to see if the approach
        # concurs with least_squares
        objective.setp(res.x)
        _pvals = np.array(res.x)

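        # residuals as a function of fractional changes about the solution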
        def residuals_scaler(vals):
            return np.squeeze(objective.residuals(_pvals * vals))

        jac = approx_derivative(residuals_scaler, np.ones_like(_pvals))
        hess = np.matmul(jac.T, jac)
        covar = np.linalg.inv(hess)

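        # undo the fractional scaling so covar refers to the parameters themselves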
        covar = covar * np.atleast_2d(_pvals) * np.atleast_2d(_pvals).T

        assert_allclose(covar, covar_least_squares)

        # check that objective.covar corresponds to the least_squares
        # covariance matrix
        objective.setp(res.x)
        _pvals = np.array(res.x)
        covar_objective = objective.covar()
        assert_allclose(covar_objective, covar_least_squares)

        # now see what happens with a parameter that has no effect on residuals
        param = Parameter(1.234, name='dummy')
        param.vary = True
        params.append(param)

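        # the dummy parameter contributes a zero column to the Jacobian, so the
        # approximate Hessian is singular and cannot be inverted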
        from pytest import raises
        with raises(LinAlgError):
            objective.covar()
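
The quantity being reproduced by hand is the Gauss-Newton covariance estimate: with J the Jacobian of the error-weighted residuals at the solution, covar is approximately inv(J.T @ J). A minimal standalone sketch of that equivalence, using an assumed quadratic model rather than the gauss fitfunc above:

    import numpy as np
    from scipy.optimize import least_squares

    rng = np.random.default_rng(2)
    x = np.linspace(-3.0, 3.0, 101)
    sigma = 0.05
    y = 1.0 + 0.5 * x ** 2 + rng.normal(scale=sigma, size=x.size)

    def residuals(p):
        # error-weighted residuals for an assumed model y = p0 + p1 * x**2
        return (p[0] + p[1] * x ** 2 - y) / sigma

    res = least_squares(residuals, np.array([0.9, 0.6]), jac="3-point")

    # Gauss-Newton covariance from the Jacobian returned by least_squares
    covar_ls = np.linalg.inv(res.jac.T @ res.jac)

    # the model is linear in its parameters, so the Jacobian is known exactly
    jac = np.column_stack([np.ones_like(x), x ** 2]) / sigma
    covar_analytic = np.linalg.inv(jac.T @ jac)
    np.testing.assert_allclose(covar_ls, covar_analytic, rtol=1e-6)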