Example #1
0
    def logprob(self, x, model):
        """Compute the log probability of observations x.

        Combines the prior log probability of every parameter with the
        model's log likelihood.

        Returns
        -------
        lp : float
            the log probability
        """
        # Push the proposed vector x into self.params so the model sees it.
        hyperparameter_utils.set_params_from_array(self.params, x)

        total = 0.0
        # Accumulate the prior log probabilities one parameter at a time,
        # checking for NaN after each addition (+inf is acceptable).
        for p in self.params:
            total += p.prior_logprob()

            if np.isnan(total):
                print('Param diagnostics:')
                p.print_diagnostics()
                print('Prior logprob: %f' % p.prior_logprob())
                raise Exception("Prior returned %f logprob" % total)

        # A non-finite prior (e.g. -inf) makes the likelihood irrelevant.
        if not np.isfinite(total):
            return total

        # Add the model's own contribution.
        total += model.log_likelihood()

        if np.isnan(total):
            raise Exception("Likelihood returned %f logprob" % total)

        return total
Example #2
0
 def sample(self, model):
     """Draw new parameters and latent values for the model.

     Runs the configured sampler ``thinning + 1`` times; each draw is
     written into ``self.params`` and the model's latent values, so only
     the final draw survives.
     """
     for _ in range(self.thinning + 1):
         # One joint draw of (parameters, latent values, log likelihood).
         new_params, latents, last_ll = self.sample_fun(
             model, **self.sampler_options)
         # Write the draw back into the parameter objects and the model.
         hyperparameter_utils.set_params_from_array(self.params,
                                                    new_params)
         model.latent_values.set_value(latents)
     self.current_ll = last_ll  # for diagnostics
Example #3
0
    def logprob(self, x, model, nu):
        """Log probability of ``x``: parameter priors plus, when whitened
        variables ``nu`` are available, the implied binomial likelihood."""
        hyperparameter_utils.set_params_from_array(self.params, x)

        # Sum of the prior log probabilities over all parameters.
        total = sum((p.prior_logprob() for p in self.params), 0.0)

        # Only evaluate the likelihood when there is data (nu given) and
        # the prior is finite.
        if nu is not None and np.isfinite(total):
            # Recover the implied observations y from nu.
            implied_y = self._compute_implied_y(model, nu)

            # Use the binomial likelihood rather than the marginal one.
            total += model.log_binomial_likelihood(y=implied_y)

        return total
Example #4
0
    def sample(self, model):
        """Draw new parameter values via elliptical slice sampling."""
        if not model.has_data:
            # TODO this should be a sample from the prior...
            return np.zeros(0)

        # Lower-triangular Cholesky factor of the prior covariance over
        # the model inputs (could instead be fetched from the model).
        chol = spla.cholesky(model.noiseless_kernel.cov(model.inputs),
                             lower=True)

        theta = hyperparameter_utils.params_to_array(self.params)
        for _ in range(self.thinning + 1):
            # One elliptical-slice move; write the draw back each time.
            theta, last_ll = elliptical_slice(
                theta, self.logprob, chol, model.mean.value,
                model, **self.sampler_options)
            hyperparameter_utils.set_params_from_array(self.params, theta)
        self.current_ll = last_ll  # for diagnostics
Example #5
0
 def sample(self, model):
     """Generate a new sample of parameters for the model.

     Notes
     -----
     The parameters are stored as self.params, a list of Params objects.
     Their values are updated in place on each call; presumably the
     parameter values affect the model (this is not required, but it
     would be a bit pointless otherwise).
     """
     # Flatten self.params into a 1-D numpy array for the sampler.
     theta = hyperparameter_utils.params_to_array(self.params)
     for _ in range(self.thinning + 1):
         # Propose a new parameter array via slice sampling.
         theta, last_ll = slice_sample(theta, self.logprob, model,
                                       **self.sampler_options)
         # Write the draw back into the Param objects.
         hyperparameter_utils.set_params_from_array(self.params, theta)
     self.current_ll = last_ll  # for diagnostics
Example #6
0
 def logprob(self, x, model):
     """Log probability of ``x`` under the model's binomial likelihood.

     The priors contribute nothing here: the Gaussian prior is built in
     to the sampler itself.
     """
     hyperparameter_utils.set_params_from_array(self.params, x)
     return model.log_binomial_likelihood()