Example #1
    def test_logprior(self):
        t0 = [10.0]

        lpost = GaussianPosterior(self.x, self.y, self.yerr, self.model)
        lpost.logprior = set_logprior(lpost, self.priors)

        lp_test = lpost.logprior(t0)
        lp = np.log(scipy.stats.norm(self.countrate, self.cerr).pdf(t0))
        assert lp == lp_test
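
Note: the test snippets on this page assume a fixture that defines `self.x`, `self.y`, `self.yerr`, `self.model`, `self.priors`, `self.countrate`, and `self.cerr`, and that `GaussianPosterior` and `set_logprior` are imported from `stingray.modeling`. Below is a minimal sketch of such a setup, assuming a constant astropy mean model; the attribute values and class name are illustrative and not taken from the original test suite.

import numpy as np
import scipy.stats
from astropy.modeling import models


class SetupSketch(object):
    """Hypothetical fixture providing the attributes used by the tests."""

    @classmethod
    def setup_class(cls):
        cls.countrate = 10.0   # mean of the Gaussian prior on the amplitude
        cls.cerr = 2.0         # width of that prior and of the data errors

        cls.x = np.linspace(0.0, 10.0, 100)
        cls.model = models.Const1D(amplitude=cls.countrate)
        cls.yerr = np.ones_like(cls.x) * cls.cerr
        cls.y = cls.model(cls.x) + np.random.normal(0.0, cls.cerr, cls.x.shape)

        # Priors map each parameter name to a callable returning its prior
        # probability; this is the form set_logprior expects.
        cls.priors = {"amplitude": scipy.stats.norm(cls.countrate, cls.cerr).pdf}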
Example #2
    def test_counts_are_nan(self):
        y = np.nan * np.ones(self.x.shape[0])

        t0 = [10.0]
        self.model.amplitude = t0[0]

        lpost = GaussianPosterior(self.x, y, self.yerr, self.model)
        lpost.logprior = set_logprior(lpost, self.priors)

        assert np.isclose(lpost(t0), logmin, 1e-5)
Example #3
    def test_negative_loglikelihood(self):
        t0 = [10.0]
        self.model.amplitude = t0[0]
        mean_model = self.model(self.x)

        loglike = -np.sum(-0.5*np.log(2.*np.pi) - np.log(self.yerr) - \
                         0.5*((self.y - mean_model)/self.yerr)**2.0)

        lpost = GaussianPosterior(self.x, self.y, self.yerr, self.model)
        lpost.logprior = set_logprior(lpost, self.priors)

        loglike_test = lpost.loglikelihood(t0, neg=True)

        assert np.isclose(loglike, loglike_test)
Example #4
    def test_negative_loglikelihood(self):
        t0 = [10.0]
        self.model.amplitude = t0[0]
        mean_model = self.model(self.x)

        loglike = -np.sum(-0.5*np.log(2.*np.pi) - np.log(self.yerr) - \
                         0.5*((self.y - mean_model)/self.yerr)**2.0)

        lpost = GaussianPosterior(self.x, self.y, self.yerr, self.model)
        lpost.logprior = set_logprior(lpost, self.priors)

        loglike_test = lpost.loglikelihood(t0, neg=True)

        assert np.isclose(loglike, loglike_test)
Example #5
    def test_negative_posterior(self):
        t0 = [10.0]
        self.model.amplitude = t0[0]
        mean_model = self.model(self.x)

        lpost = GaussianPosterior(self.x, self.y, self.yerr, self.model)
        lpost.logprior = set_logprior(lpost, self.priors)

        post_test = lpost(t0, neg=True)

        loglike = np.sum(-0.5*np.log(2.*np.pi) - np.log(self.yerr) - \
                         0.5*((self.y - mean_model)/self.yerr)**2.0)
        logprior = np.log(scipy.stats.norm(self.countrate, self.cerr).pdf(t0))

        post = -loglike - logprior

        assert np.isclose(post_test, post, atol=1.e-10)
Example #6
    def test_negative_posterior(self):
        t0 = [10.0]
        self.model.amplitude = t0[0]
        mean_model = self.model(self.x)

        lpost = GaussianPosterior(self.x, self.y, self.yerr, self.model)
        lpost.logprior = set_logprior(lpost, self.priors)

        post_test = lpost(t0, neg=True)

        loglike = np.sum(-0.5*np.log(2.*np.pi) - np.log(self.yerr) - \
                         0.5*((self.y - mean_model)/self.yerr)**2.0)
        logprior = np.log(scipy.stats.norm(self.countrate, self.cerr).pdf(t0))

        post = -loglike - logprior

        assert np.isclose(post_test, post, atol=1.e-10)
Example #7
    def test_correct_number_of_parameters(self):
        lpost = GaussianPosterior(self.x, self.y, self.yerr, self.model)
        lpost.logprior = set_logprior(lpost, self.priors)

        with pytest.raises(IncorrectParameterError):
            lpost([2, 3])
Example #8
    def test_making_posterior(self):
        lpost = GaussianPosterior(self.x, self.y, self.yerr, self.model)
        lpost.logprior = set_logprior(lpost, self.priors)

        assert lpost.x.all() == self.x.all()
        assert lpost.y.all() == self.y.all()
Example #9
    def test_logprior_fails_without_prior(self):
        lpost = GaussianPosterior(self.x, self.y, self.yerr, self.model)

        with pytest.raises(AttributeError):
            lpost.logprior([10])
Example #10
def fit_crossspectrum(cs,
                      model,
                      starting_pars=None,
                      max_post=False,
                      priors=None,
                      fitmethod="L-BFGS-B"):
    """
    Fit a number of Lorentzians to a cross spectrum, possibly including white
    noise. Each Lorentzian has three parameters (amplitude, centroid position,
    full-width at half maximum); one extra parameter is added if the white
    noise level should be fit as well. Priors for each parameter can be
    included if `max_post = True`, in which case the function will attempt a
    Maximum-A-Posteriori fit. Priors must be specified as a dictionary with
    one entry for each parameter.

    The parameter names are `(amplitude_i, x_0_i, fwhm_i)` for each `i` from
    `0` to `N - 1`, where `N` is the number of Lorentzians. The white noise
    level adds one extra parameter, `amplitude_N`. For example, a model with
    two Lorentzians and a white noise level would have the parameters
    `[amplitude_0, x_0_0, fwhm_0, amplitude_1, x_0_1, fwhm_1, amplitude_2]`.

    Parameters
    ----------
    cs : Crossspectrum
        A Crossspectrum object with the data to be fit

    model : astropy.modeling.models class instance
        The parametric model that is supposed to describe the data. For
        details, see the astropy.modeling documentation.

    starting_pars : iterable, optional, default None
        The list of starting guesses for the optimizer. If it is not provided,
        the default parameters are taken from `model`. See the explanation
        above for the ordering of parameters in this list.

    max_post : bool, optional, default False
        If True, perform a Maximum-A-Posteriori fit of the data rather than a
        Maximum Likelihood fit. Note that this requires `priors` to be
        specified; otherwise an exception will be raised.

    priors : {dict | None}, optional, default None
        Dictionary with priors for the MAP fit. This should be of the form
        {"parameter name": probability distribution, ...}

    fitmethod : string, optional, default "L-BFGS-B"
        Specifies an optimization algorithm to use. Supply any valid option for
        `scipy.optimize.minimize`.

    Returns
    -------
    parest : PSDParEst object
        A PSDParEst object for further analysis

    res : OptimizationResults object
        The OptimizationResults object storing useful results and quantities
        relating to the fit
    """
    if not isinstance(starting_pars, (np.ndarray, list)):
        starting_pars = model.parameters
    if priors:
        lgauss = GaussianPosterior(cs.freq, np.abs(cs.power), cs.power_err,
                                   model, priors)
    else:
        lgauss = GaussianLogLikelihood(cs.freq,
                                       np.abs(cs.power),
                                       model=model,
                                       yerr=cs.power_err)

    parest = PSDParEst(cs, fitmethod=fitmethod, max_post=max_post)
    res = parest.fit(lgauss, starting_pars, neg=True)

    return parest, res
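
A usage sketch for `fit_crossspectrum`, assuming the stingray classes it relies on (`Lightcurve`, `Crossspectrum`, the posterior/likelihood classes, and `PSDParEst`) are importable; the simulated light curves, model, starting values, and priors below are purely illustrative and not taken from the function above.

import numpy as np
import scipy.stats
from astropy.modeling import models
from stingray import Lightcurve, Crossspectrum

# Simulate two Poisson light curves and build a cross spectrum from them.
dt = 0.001
time = np.arange(0.0, 10.0, dt)
lc1 = Lightcurve(time, np.random.poisson(100.0, time.shape[0]), dt=dt)
lc2 = Lightcurve(time, np.random.poisson(100.0, time.shape[0]), dt=dt)
cs = Crossspectrum(lc1, lc2, norm="leahy")

# One Lorentzian plus a constant white-noise level; the compound model's
# parameters are named amplitude_0, x_0_0, fwhm_0 and amplitude_1.
model = models.Lorentz1D(amplitude=5.0, x_0=2.0, fwhm=0.5) + \
        models.Const1D(amplitude=2.0)

starting_pars = [5.0, 2.0, 0.5, 2.0]   # same ordering as model.parameters

# Maximum-likelihood fit.
parest, res = fit_crossspectrum(cs, model, starting_pars=starting_pars)

# Maximum-a-posteriori fit: one prior per parameter name, each a callable
# returning the prior probability, as described in the docstring above.
priors = {
    "amplitude_0": scipy.stats.uniform(0.0, 10.0).pdf,
    "x_0_0": scipy.stats.uniform(0.0, 5.0).pdf,
    "fwhm_0": scipy.stats.uniform(0.0, 2.0).pdf,
    "amplitude_1": scipy.stats.uniform(0.0, 5.0).pdf,
}
parest_map, res_map = fit_crossspectrum(cs, model, starting_pars=starting_pars,
                                        max_post=True, priors=priors)

print(res.p_opt)   # best-fit parameter values stored on the result object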
Example #11
    def test_correct_number_of_parameters(self):
        lpost = GaussianPosterior(self.x, self.y, self.yerr, self.model)
        lpost.logprior = set_logprior(lpost, self.priors)

        with pytest.raises(IncorrectParameterError):
            lpost([2, 3])
Example #12
    def test_making_posterior(self):
        lpost = GaussianPosterior(self.x, self.y, self.yerr, self.model)
        lpost.logprior = set_logprior(lpost, self.priors)

        assert lpost.x.all() == self.x.all()
        assert lpost.y.all() == self.y.all()
Example #13
    def test_logprior_fails_without_prior(self):
        lpost = GaussianPosterior(self.x, self.y, self.yerr, self.model)

        with pytest.raises(AttributeError):
            lpost.logprior([10])