def log_prob(self):
    """Return the log-probability of the training set.

    log_prob = log_likelihood + log_prior.  The first call computes and
    caches the training-set log-likelihood, the log-prior (when priors are
    enabled) and the test-set log-likelihood; subsequent calls return the
    cached total.
    """
    if self.__ll_cached:
        return self.__ll

    # Messages are partitioned by the split time: a message ending at or
    # before the split counts toward the training likelihood, one starting
    # after it toward the test likelihood.  NOTE(review): a message that
    # straddles the split falls in neither sum — confirm this is intended.
    flat_messages = [msg for chain in self._messages for msg in chain]
    self.__log_likelihood = sum(
        msg.log_prob() for msg in flat_messages
        if msg.get_end_time() <= self._time_to_split)
    self.__log_likelihood_test_set = sum(
        msg.log_prob() for msg in flat_messages
        if msg.get_start_time() > self._time_to_split)

    # Log-prior, accumulated separately; zero when priors are disabled.
    self.__log_prior = 0
    if self._use_prior:
        self.__log_prior += log_gamma(
            self._time_decay, *self._time_decay_prior).sum()
        if self._non_diagonal:
            # Only the off-diagonal influence entries carry a prior.
            # Presumably eye(self._A) builds an identity mask sized by
            # the number of nodes — TODO confirm.
            off_diagonal = self._influence[eye(self._A) == 0]
            self.__log_prior += log_gamma(
                off_diagonal, *self._influence_prior).sum()
        else:
            self.__log_prior += log_gamma(
                self._influence, *self._influence_prior).sum()
        self.__log_prior += log_gamma(
            self._word_concentration, *self._word_concentration_prior).sum()
        self.__log_prior += log_gamma(
            self._word_pseudocounts, *self._word_pseudocount_prior).sum()

    self.__ll = self.__log_likelihood + self.__log_prior
    self.__ll_cached = True
    return self.__ll
def log_prior(self):
    """Return the log-prior of tau, caching the result on first use.

    Returns:
        The (scalar) sum of log-gamma prior terms over the entries of
        ``self._tau``.

    Raises:
        ValueError: if any tau hyperparameter is non-positive.
    """
    if self._prior_cached:
        return self._log_prior
    # Validate explicitly rather than with `assert`, which is silently
    # stripped when Python runs with the -O flag.
    if not all(self._hyperparams_tau > 0):
        raise ValueError("tau hyperparameters must all be positive")
    # prior for tau
    self._log_prior = sum(log_gamma(self._tau, *self._hyperparams_tau))
    self._prior_cached = True
    return self._log_prior