def test_posterior_cost():
    """The cost method must return the negated log-posterior value."""
    mock_likelihood = MagicMock(return_value=4)
    mock_prior = MagicMock(return_value=5)
    post = Posterior(mock_likelihood, mock_prior)
    # calling the posterior and negating the cost must agree
    assert post(44.4) == -post.cost(44.4)
def test_posterior_cost_gradient():
    """The cost gradient must be the negated posterior gradient."""
    mock_likelihood = MagicMock(return_value=4)
    mock_likelihood.gradient.return_value = 0
    mock_prior = MagicMock(return_value=5)
    mock_prior.gradient.return_value = 1
    post = Posterior(mock_likelihood, mock_prior)
    # gradient and cost_gradient must be exact negations of each other
    assert post.gradient(44.4) == -post.cost_gradient(44.4)
def test_posterior_gradient():
    """Evaluating the posterior gradient must query both component gradients."""
    mock_likelihood = MagicMock(return_value=4)
    mock_prior = MagicMock(return_value=5)
    post = Posterior(mock_likelihood, mock_prior)
    post.gradient(44.4)
    # both the likelihood and prior gradients should have been evaluated
    for component in (mock_likelihood, mock_prior):
        component.gradient.assert_called()
def test_posterior_initial_guess_default_args():
    """With default arguments, the single best of the default prior draws is returned."""
    # identity mocks make the posterior log-probability equal to 2 * sample value,
    # so the largest drawn sample is the best guess
    mock_likelihood = MagicMock(side_effect=lambda v: v)
    mock_prior = MagicMock(side_effect=lambda v: v)
    mock_prior.sample.side_effect = range(200)
    post = Posterior(mock_likelihood, mock_prior)
    guesses = post.generate_initial_guesses()
    # defaults draw 100 samples (0..99) and keep only the highest-probability one
    assert guesses == [99]
def test_posterior_call():
    """Calling the posterior sums the likelihood and prior log-probabilities."""
    mock_likelihood = MagicMock(return_value=4)
    mock_prior = MagicMock(return_value=5)
    post = Posterior(mock_likelihood, mock_prior)
    value = post(44.4)
    # the log-posterior is the sum of the two mocked component values
    assert value == 4 + 5
    # cost is the negation, regardless of the evaluation point (mocks are constant)
    assert value == -post.cost(55.5)
    mock_likelihood.assert_called()
    mock_prior.assert_called()
def test_posterior_initial_guess():
    """generate_initial_guesses returns the top n_guesses of the drawn prior samples."""
    # identity mocks rank the samples by their own value
    mock_likelihood = MagicMock(side_effect=lambda v: v)
    mock_prior = MagicMock(side_effect=lambda v: v)
    mock_prior.sample.side_effect = range(100)
    post = Posterior(mock_likelihood, mock_prior)
    # 10 draws (0..9), best two kept in descending order of probability
    assert post.generate_initial_guesses(n_guesses=2, prior_samples=10) == [9, 8]
    # reset the sample stream, then request a single guess from a single draw
    mock_prior.sample.side_effect = range(100)
    assert post.generate_initial_guesses(n_guesses=1, prior_samples=1) == [0]
def test_posterior_bad_initial_guess_types():
    """Non-integer arguments to generate_initial_guesses must raise TypeError."""
    post = Posterior(MagicMock(return_value=4), MagicMock(return_value=5))
    # both n_guesses and prior_samples must be integers
    for bad_args in [(2.2,), (2, 3.3)]:
        with pytest.raises(TypeError):
            post.generate_initial_guesses(*bad_args)
UniformPrior(lower=0., upper=12., variable_indices=[2])
]

# Now we use the JointPrior class to combine the various components into a single prior
# distribution which covers all the model parameters.
prior = JointPrior(components=prior_components, n_variables=4)

# As with the likelihood, prior objects can also be called as a function to return a
# log-probability value when passed a vector of model parameters. We can also draw
# samples from the prior directly using the sample() method:
prior_sample = prior.sample()
print(prior_sample)

# The likelihood and prior can be easily combined into a posterior distribution
# using the Posterior class:
from inference.posterior import Posterior

posterior = Posterior(likelihood=likelihood, prior=prior)

# Now we have constructed a posterior distribution, we can sample from it
# using Markov-chain Monte-Carlo (MCMC).
# The inference.mcmc module contains implementations of various MCMC sampling algorithms.
# Here we import the PcaChain class and use it to create a Markov-chain object:
from inference.mcmc import PcaChain

chain = PcaChain(posterior=posterior, start=initial_guess)

# We generate samples by advancing the chain by a chosen number of steps
# using the advance method:
chain.advance(25000)

# We can check the status of the chain using the plot_diagnostics method:
chain.plot_diagnostics()
def test_posterior_bad_initial_guess_values():
    """Out-of-range argument values to generate_initial_guesses must raise ValueError."""
    post = Posterior(MagicMock(return_value=4), MagicMock(return_value=5))
    # non-positive counts, and requesting more guesses than samples, are all invalid
    invalid_arguments = [(-1,), (0,), (1, -3), (1, 0), (2, 1)]
    for bad_args in invalid_arguments:
        with pytest.raises(ValueError):
            post.generate_initial_guesses(*bad_args)