def test_running_urbk(self):
    prior_rv = normal.med(mu=0.5, sigma=1.0)
    n = normal.fast_p
    prior = jit.jit_probability(prior_rv)

    @numba.jit(fastmath=True, nopython=True, forceobj=False)
    def likelihood(y, w):
        return n(y - w, mu=0.0, sigma=1.0)

    data = normal.sample(mu=3.0, sigma=1.0, size=100)
    log_likelihood, log_prior = jit_log_probabilities((data,), likelihood, prior)

    samples, densities = search_posterior_estimation(
        size=300,
        log_likelihood=log_likelihood,
        log_prior=log_prior,
        initial=prior_rv.sample(size=10),
        energy=0.1,
        volume=100,
    )

    density = URBK(variance=5.0, verbose=True)
    density.fit(samples, densities)

    lb, ub = -6, 6
    n_points = 2000  # renamed from n to avoid shadowing the fast_p alias captured by likelihood
    x = np.linspace(lb, ub, n_points)
    y = density.p(x)

    self.assertEqual(y.size, n_points)

    fast_p = density.get_fast_p()
    fast_p(x)  # slower than the regular p here, but numba-compiled callers need numba functions
def _execute_test(distribution=None, parameters=None, names=None):
    # Exercise every split of the parameters between positional and keyword arguments.
    for test_case in itertools.product([0, 1], repeat=len(parameters)):
        kwargs, args = {}, []
        for i, pick in enumerate(test_case):
            if pick == 1:
                kwargs[names[i]] = parameters[i]
            else:
                args.append(parameters[i])

        rv = distribution.med(**kwargs)
        samples = rv.sample(*args, size=10)

        fast_p = jit.jit_probability(rv)
        fast_p(samples, *args)
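# Hedged usage sketch (assumption, not part of the original suite): _execute_test is
# presumably driven by per-distribution tests that pass the distribution module, its
# parameter values, and the matching keyword names, e.g. mirroring the normal
# distribution used elsewhere in this file:
#
#     _execute_test(distribution=normal, parameters=[0.5, 1.0], names=["mu", "sigma"])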
def test_parameter_posterior_mcmc(self):
    prior_rv = normal.med(mu=0.5, sigma=1.0)
    n = normal.fast_p
    prior = jit.jit_probability(prior_rv)

    @numba.jit(fastmath=True, nopython=True, forceobj=False)
    def likelihood(y, w):
        return n(y - w, mu=0.0, sigma=1.0)

    data = normal.sample(mu=3.0, sigma=1.0, size=100)
    log_likelihood, log_prior = jit_log_probabilities((data,), likelihood, prior)

    result = fast_metropolis_hastings_log_space_parameter_posterior_estimation(
        size=2000,
        log_likelihood=log_likelihood,
        log_prior=log_prior,
        initial=prior_rv.sample(size=10),
        energy=0.1,
    )

    # The posterior over w should concentrate around the true data mean (3.0).
    self.assertAlmostEqual(result.mean(), 3.0, delta=1.0)
def test_parameter_posterior_search(self):
    prior_rv = normal.med(mu=0.5, sigma=1.0)
    n = normal.fast_p
    prior = jit.jit_probability(prior_rv)

    @numba.jit(fastmath=True, nopython=True, forceobj=False)
    def likelihood(y, w):
        return n(y - w, mu=0.0, sigma=1.0)

    data = normal.sample(mu=3.0, sigma=1.0, size=100)
    log_likelihood, log_prior = jit_log_probabilities((data,), likelihood, prior)

    points, densities = search_posterior_estimation(
        size=1000,
        log_likelihood=log_likelihood,
        log_prior=log_prior,
        initial=prior_rv.sample(size=10),
        energy=0.1,
        volume=1000,
    )

    # Smoke test: the search only needs to complete without raising.
    self.assertTrue(True)
def jitted_likelihood(likelihood: Union[RandomVariable, Callable[[np.ndarray], np.ndarray]]):
    # RandomVariables already expose a jit-compatible probability function;
    # plain callables are compiled with numba directly.
    if isinstance(likelihood, RandomVariable):
        return jit_probability(likelihood)
    return numba.jit(nopython=True, fastmath=True, forceobj=False)(likelihood)
def jitted_prior(rv: RandomVariable):
    return jit_probability(rv)
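# Hedged usage sketch (assumption, mirroring the tests above): both helpers return
# numba-compiled probability functions that can be fed to jit_log_probabilities.
#
#     data = normal.sample(mu=3.0, sigma=1.0, size=100)
#     prior = jitted_prior(normal.med(mu=0.5, sigma=1.0))
#
#     def likelihood(y, w):
#         return normal.fast_p(y - w, mu=0.0, sigma=1.0)
#
#     log_likelihood, log_prior = jit_log_probabilities(
#         (data,), jitted_likelihood(likelihood), prior)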