def stepwise_e_step(self, data, varNames):
    """Collect batch sufficient statistics for each variable.

    data: 2d numpy array, one row per data case
    varNames: list of str naming the columns of ``data``

    Returns a dict mapping each model variable to the pair
    (sufficientStatistics, batchSufficientStatistics) of the clique
    that contains that variable.
    """
    ctp = NaturalCliqueTreePropagation(self._model)
    tree = ctp.cliqueTree()
    cliques = tree.cliques

    # set up evidence
    datacase = ContinuousDatacase.create(varNames)
    datacase.synchronize(self._model)

    # Propagate each data case and fold every clique's propagated
    # potential into the batch statistics.
    for row in data:  # iterate rows directly instead of range(len(data))
        datacase.putValues(row)
        evidence = datacase.getEvidence()
        ctp.use(evidence)
        ctp.propagate()
        for j, clique in enumerate(cliques):
            self.batchSufficientStatistics[j].add(clique.potential)

    # construct variable to statisticMap: each variable points at the
    # statistics of the clique that holds it.
    variableStatisticMap = dict()
    for node in self._model.nodes:
        clique = tree.getClique(node.variable)
        index = cliques.index(clique)
        variableStatisticMap[node.variable] = (
            self.sufficientStatistics[index],
            self.batchSufficientStatistics[index])
    return variableStatisticMap
def loglikelihood(self, x):
    """Return the model log-likelihood of the data cases in ``x``.

    x: 2d numpy array, each row is one data case
    """
    assert len(x.shape) == 2
    propagator = NaturalCliqueTreePropagation(self._model)
    case = ContinuousDatacase.create(self.varNames)
    case.synchronize(self._model)
    case.putValues(x)
    propagator.use(case.getEvidence())
    propagator.propagate()
    return propagator.loglikelihood
def inference(self, x):
    """Compute posterior marginals of all latent (internal) variables.

    x: 2d numpy array, one row per data case

    Returns a list with one entry per internal variable: the marginal
    probabilities (exp of the propagated log-marginal) given ``x``.
    """
    latentVars = self._model.getInternalVariables()
    datacase = ContinuousDatacase.create(self.varNames)
    datacase.synchronize(self._model)
    ctp = NaturalCliqueTreePropagation(self._model)

    datacase.putValues(x)
    evidence = datacase.getEvidence()
    ctp.use(evidence)
    ctp.propagate()

    # Convert each latent marginal from log-space to probabilities.
    # (Removed unused local `num = x.shape[0]` from the original.)
    return [np.exp(ctp.getMarginal(v).logprob) for v in latentVars]
def grad(self, x):
    """Gradient of the log-likelihood with respect to the observed values.

    x: 2d numpy array of shape (num, dim), one row per data case

    Returns a (num, dim) array whose columns follow the order of
    ``datacase.variables``.
    """
    datacase = ContinuousDatacase.create(self.varNames)
    datacase.synchronize(self._model)
    ctp = NaturalCliqueTreePropagation(self._model)
    num, dim = x.shape

    datacase.putValues(x)
    evidence = datacase.getEvidence()
    ctp.use(evidence)
    ctp.propagate()
    tree = ctp.cliqueTree()

    # (Removed unused locals `out` and `b` from the original.)
    gradient = np.zeros((num, dim))
    for node in self._model.getLeafNodes():
        clique = tree.getClique(node.variable)
        cliquePotential = clique.potential.clone()
        cliquePotential.normalize()

        moGpotential = node.potential.clone()
        contVars = moGpotential.continuousVariables  # renamed: `vars` shadows a builtin
        value = evidence.getValues(contVars)

        # Mixture-of-Gaussians gradient: sum over components j of
        # covar_j^{-1} (mu_j - x), weighted by the normalized clique
        # posterior, which is stored in log-space (hence exp of logp).
        subgradient = np.zeros((num, len(contVars)))
        for j in range(moGpotential.size):
            sol = np.linalg.solve(
                moGpotential.get(j).covar,
                (np.expand_dims(moGpotential.get(j).mu, axis=0) - value).T).T
            subgradient += sol * np.expand_dims(
                np.exp(cliquePotential.logp[:, j]), axis=1)

        # Scatter this leaf's columns into the full gradient.
        for j, var in enumerate(contVars):
            index = datacase.variables.index(var)
            gradient[:, index] = subgradient[:, j]
    return gradient
        # NOTE(review): this chunk begins mid-statement — the call closed by
        # the line below starts above this view; indentation here is
        # reconstructed and should be confirmed against the full file.
        [parentnode.variable, childnode.variable], prob)
    # Continuous children are merged into a single joint node under the parent.
    if isinstance(childnodes[0], ContinuousBeliefNode):
        newnode = self.net.combine(True, childnodes)
        self.net.addEdge(newnode, parentnode)
        # Each row of `prob` holds the mean (first `dim` entries) followed by
        # a flattened dim x dim covariance matrix, as the slices below show.
        dim = len(newnode.variable.variables)
        mus = prob[:, :dim]
        rest = prob[:, dim:]
        covs = []
        for row in rest:
            cov = row.reshape((1, dim, dim))
            covs.append(cov)
        covs = np.concatenate(covs, axis=0)
        newnode.potential.setEntries(mus, covs)


if __name__ == "__main__":
    # Smoke test: parse a toy continuous network, assert evidence on
    # variable "x", and run one clique-tree propagation.
    bifparser = BifParser()
    net = bifparser.parse("continuoustoy.bif")
    # set up evidence
    evidence = Evidence()
    x = net.getNode("x").variable.variables[0]
    evidence.add(x, 0)
    ctp = NaturalCliqueTreePropagation(net)
    print(ctp._tree)
    ctp.use(evidence)
    ctp.propagate()
    loglikelihood = ctp.loglikelihood
    print("Loglikelihood: ", loglikelihood)