import numpy as np


def compute_log_posteriors_multisubject(Xs, partitions=None, alpha=None,
                                        prior_H=None, verbose=False):
    """Compute the log of p(H|Xs) for all hypotheses H, i.e. all partitions
    of the classes, given the list of confusion matrices Xs of different
    (independent) subjects, the Dirichlet prior alpha and the prior over
    hypotheses p(H) (prior_H).
    """
    if partitions is None:
        partitions = list(Partition(range(Xs[0].shape[0])))

    if alpha is None:
        if verbose:
            print "Assuming non-informative Dirichlet prior."

        alpha = np.ones(Xs[0].shape)

    if prior_H is None:
        if verbose:
            print "Assuming uniform prior for p(H_i)."

        prior_H = np.ones(len(partitions)) / len(partitions)

    # log p(X_j | H_i) for each partition i and each subject j:
    logp_X_given_H = np.zeros((len(partitions), len(Xs)))
    for i, partition in enumerate(partitions):
        for j in range(len(Xs)):
            logp_X_given_H[i, j] = compute_logp_H(Xs[j], partition, alpha=alpha)

    # normalization constant: p(X)
    logp_X = reduce(np.logaddexp, logp_X_given_H.sum(1) + np.log(prior_H))
    # p(H|X) from Bayes rule:
    log_posterior_H_given_X = logp_X_given_H.sum(1) + np.log(prior_H) - logp_X
    return log_posterior_H_given_X, partitions
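
# Usage sketch (illustrative, not part of the original module): it assumes that
# compute_logp_H is defined above in this module and that partitioner.Partition
# enumerates all set partitions of range(n). The two 3x3 confusion matrices are
# made-up counts for two hypothetical subjects; the call returns the log-posterior
# of every partition of the three classes together with the partitions themselves.
def _example_multisubject():
    X1 = np.array([[10, 2, 1],
                   [3, 9, 2],
                   [1, 1, 12]])
    X2 = np.array([[8, 4, 2],
                   [2, 11, 1],
                   [0, 3, 10]])
    log_post, partitions = compute_log_posteriors_multisubject([X1, X2], verbose=True)
    best = np.argmax(log_post)
    print "most probable partition:", partitions[best]
    print "posterior probability:", np.exp(log_post[best])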


from partitioner import Partition


if __name__ == '__main__':

    X = tu
    print "X:"
    print X

    partitions = list(Partition(range(X.shape[0])))

    alpha = np.ones(X.shape)  # uniform prior on confusion matrices
    print "alpha:"
    print alpha

    # uniform prior on hypotheses: p(H_i)
    prior_H = np.ones(len(partitions)) / len(partitions)

    logp_X_given_H = np.zeros(len(partitions))
    for i, partition in enumerate(partitions):
        logp_X_given_H[i] = compute_logp_H(X, partition, alpha=alpha)

    # normalization constant: p(X)
    logp_X = reduce(np.logaddexp, logp_X_given_H + np.log(prior_H))
    # p(H|X) from Bayes rule:
    log_posterior_H_given_X = logp_X_given_H + np.log(prior_H) - logp_X

    idx = np.argsort(log_posterior_H_given_X)[::-1]
    print
    for k, i in enumerate(idx[:5]):
        print "%s) p(%s | X) = %s" % (k + 1, partitions[i],
                                      np.exp(log_posterior_H_given_X[i]))
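
    # Illustrative check (not in the original script): with a single subject the
    # multi-subject function above should give the same posteriors, since it sums
    # the per-subject log-likelihoods over the one-element list [X].
    log_post_check, _ = compute_log_posteriors_multisubject([X], partitions=partitions,
                                                            alpha=alpha, prior_H=prior_H)
    assert np.allclose(log_posterior_H_given_X, log_post_check)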

    psi = [[0, 1], [2]]
    # psi = [[0], [1], [2]]
    # psi = [[0], [1, 2]]
    # psi = [[0, 1, 2]]
    # psi = [[0], [1], [2], [3], [4]]
    # psi = [[0, 1], [2], [3], [4]]
    # psi = [[0, 1, 2], [3], [4]]
    # psi = [[0, 1, 2, 3], [4]]
    # psi = [[0, 1, 2], [3, 4]]
    # psi = [[0, 1, 2, 3, 4]]
    print "psi:", psi

    alpha = np.ones(X.shape)
    print "alpha:"
    print alpha

    # log-likelihood of X under every partition of the classes:
    logp_Hs = []
    print "psi \t log-likelihood"
    partitions = list(Partition(range(X.shape[0])))
    partitions1 = Partition(range(1, X.shape[0] + 1))  # 1-based class labels (unused below)
    for psi in partitions:
        logp_H = compute_logp_H(X, psi, alpha)
        print psi, logp_H
        logp_Hs.append(logp_H)

    # same list, sorted by increasing log-likelihood:
    idxs = np.argsort(logp_Hs)
    print
    for idx in idxs:
        print partitions[idx], logp_Hs[idx]
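
    # Illustrative follow-up (not in the original script): turn the log-likelihoods
    # above into posterior probabilities under a uniform prior over partitions,
    # mirroring the Bayes-rule normalization used earlier in this file.
    logp_Hs = np.asarray(logp_Hs)
    log_prior = -np.log(len(logp_Hs))                         # uniform p(H_i)
    log_evidence = reduce(np.logaddexp, logp_Hs + log_prior)  # log p(X)
    log_post_uniform = logp_Hs + log_prior - log_evidence
    print
    for idx in idxs[::-1]:
        print partitions[idx], np.exp(log_post_uniform[idx])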