import torch
from torch import Tensor

# These helpers are assumed to be importable from the sbi utilities; adjust
# the import path if they live elsewhere in your package layout.
from sbi.utils import (
    ensure_theta_batched,
    match_theta_and_x_batch_shapes,
    within_support,
)


def check_prior_support(prior) -> None:
    """Check whether the prior allows checking for support.

    This either uses the PyTorch `support` property, or the custom prior
    `.log_prob()` method.
    """
    try:
        # Evaluating a sample against the support raises NotImplementedError
        # if the prior provides neither mechanism.
        within_support(prior, prior.sample())
    except NotImplementedError as err:
        raise NotImplementedError(
            "The prior must implement the support property or allow calling "
            ".log_prob() outside of its support."
        ) from err
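# A minimal usage sketch (hypothetical demo, not part of the library API):
# any torch distribution that defines the `support` property should pass the
# check above. The function name `_demo_check_prior_support` is illustrative.
def _demo_check_prior_support() -> None:
    from torch.distributions import MultivariateNormal

    # MultivariateNormal defines `.support`, so the check passes silently.
    prior = MultivariateNormal(torch.zeros(2), torch.eye(2))
    check_prior_support(prior)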
def __call__(self, theta: Tensor, track_gradients: bool = True) -> Tensor:
    r"""Returns the potential for posterior-based methods.

    Args:
        theta: The parameter set at which to evaluate the potential function.
        track_gradients: Whether to track the gradients.

    Returns:
        The potential.
    """
    # Ensure theta has a batch dimension and pair each theta with x_o.
    theta = ensure_theta_batched(torch.as_tensor(theta))
    theta, x_repeated = match_theta_and_x_batch_shapes(theta, self.x_o)
    theta, x_repeated = theta.to(self.device), x_repeated.to(self.device)

    with torch.set_grad_enabled(track_gradients):
        posterior_log_prob = self.posterior_estimator.log_prob(
            theta, context=x_repeated
        )

        # Force probability to be zero outside prior support.
        in_prior_support = within_support(self.prior, theta)
        posterior_log_prob = torch.where(
            in_prior_support,
            posterior_log_prob,
            torch.tensor(float("-inf"), dtype=torch.float32, device=self.device),
        )
    return posterior_log_prob
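# A usage sketch under assumptions: `potential_fn` is an instance of the class
# that owns `__call__` above, constructed elsewhere with a trained posterior
# estimator, a prior, and an observation x_o. The helper name and the batch
# size are illustrative only.
def _demo_potential_call(potential_fn, prior, n: int = 100) -> None:
    theta = prior.sample((n,))
    # Evaluate without gradient tracking, e.g. for plain density evaluation.
    log_probs = potential_fn(theta, track_gradients=False)
    # Parameters outside the prior support receive a log-probability of -inf;
    # one log-probability is returned per batch element.
    assert log_probs.shape == (n,)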