def get_inf_jtree(self):
    sn = self.get_factor_graph()
    prop = dai.PropertySet()
    prop["inference"] = "SUMPROD"
    prop["updates"] = "HUGIN"
    prop["verbose"] = "1"
    inf = dai.JTree(sn, prop)
    return inf
def run_loopyBP(sg_model, maxiter, updates="SEQRND", damping=None):
    if maxiter is None:
        maxiter = LIBDAI_LBP_ITERS

    sg_FactorGraph = build_libdaiFactorGraph_from_SpinGlassModel(
        sg_model, fixed_variables={})
    # sg_FactorGraph = build_graph_from_clique_ising_model(sg_model, fixed_variables={})

    # Write factorgraph to a file
    sg_FactorGraph.WriteToFile('sg_temp.fg')

    # Set some constants
    tol = 1e-9
    verb = 1

    # Store the constants in a PropertySet object
    opts = dai.PropertySet()
    opts["maxiter"] = str(maxiter)  # Maximum number of iterations
    opts["tol"] = str(tol)  # Tolerance for convergence
    opts["verbose"] = str(verb)  # Verbosity (amount of output generated)

    ##################### Run Loopy Belief Propagation #####################
    # Construct a BP (belief propagation) object from the FactorGraph sg_FactorGraph
    # using the parameters specified by opts and two additional properties,
    # specifying the type of updates the BP algorithm should perform and
    # whether they should be done in the real or in the log domain
    bpopts = opts
    # bpopts["updates"] = "SEQRND"
    # bpopts["updates"] = "PARALL"
    bpopts["updates"] = updates
    bpopts["logdomain"] = "1"
    if damping is not None:
        bpopts["damping"] = damping

    bp = dai.BP(sg_FactorGraph, bpopts)
    # Initialize belief propagation algorithm
    bp.init()
    # Run belief propagation algorithm
    bp.run()

    # Report log partition sum of sg_FactorGraph, approximated by the belief propagation algorithm
    ln_z_estimate = bp.logZ()
    return ln_z_estimate
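# Example usage (illustrative sketch, not part of the original module): estimate
# ln(Z) of a spin glass model with loopy BP. `sg_model` is assumed to be a
# SpinGlassModel instance built elsewhere in this repo; the damping value is
# passed as a string because libDAI PropertySet values are strings.
#
#     ln_z_lbp = run_loopyBP(sg_model, maxiter=1000, updates="SEQRND", damping="0.5")
#     print('LBP ln(Z) estimate:', ln_z_lbp)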
def get_inf_bp(self, logdomain=False, verbose=False):
    sn = dai.FactorGraph(self.vecfac)
    prop = dai.PropertySet()
    prop["tol"] = "1e-9"
    prop["logdomain"] = "1" if logdomain else "0"
    prop["updates"] = "SEQFIX"
    prop["verbose"] = "1" if verbose else "0"
    lb_inf = dai.BP(sn, prop)
    return lb_inf
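# Example usage (illustrative sketch; assumes `obj` is an instance of the
# surrounding wrapper class, which exposes `self.vecfac` as its factor vector):
#
#     bp = obj.get_inf_bp(logdomain=True)
#     bp.init()
#     bp.run()
#     print('BP ln(Z) estimate:', bp.logZ())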
def get_inf(self, name, **kwds):
    # Previous implementation, disabled and kept for reference:
    # cls, config = network_toolkit.dai_conf.config_map[name]
    # prop = dai.PropertySet()
    # for c in config:
    #     prop[c] = config[c]
    # if verbose:
    #     prop["verbose"] = "1"
    # sn = dai.FactorGraph(self.vecfac)
    # inf = cls(sn, prop)
    # return inf
    sn = self.get_factor_graph()
    prop = dai.PropertySet()
    for k in kwds:
        prop[k] = str(kwds[k])
    inf_alg = dai.newInfAlg(name, sn, prop)
    return inf_alg
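# Example usage (illustrative sketch; `obj` as above): build any libDAI
# inference algorithm by name via dai.newInfAlg, passing options as keywords.
#
#     inf = obj.get_inf("BP", maxiter=1000, tol=1e-9, updates="SEQRND", logdomain=1)
#     inf.init()
#     inf.run()
#     print(inf.logZ())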
def run_mean_field(sg_model, maxiter):
    if maxiter is None:
        maxiter = LIBDAI_MEAN_FIELD_ITERS

    sg_FactorGraph = build_libdaiFactorGraph_from_SpinGlassModel(
        sg_model, fixed_variables={})
    # sg_FactorGraph = build_graph_from_clique_ising_model(sg_model, fixed_variables={})

    # Write factorgraph to a file
    sg_FactorGraph.WriteToFile('sg_temp.fg')

    # Set some constants
    tol = 1e-9
    verb = 1

    # Store the constants in a PropertySet object
    opts = dai.PropertySet()
    opts["maxiter"] = str(maxiter)  # Maximum number of iterations
    opts["tol"] = str(tol)  # Tolerance for convergence
    opts["verbose"] = str(verb)  # Verbosity (amount of output generated)

    ##################### Run Mean Field #####################
    # Construct an MF (mean field) object from the FactorGraph sg_FactorGraph
    # using the parameters specified by opts
    mfopts = opts
    # mfopts["damping"] = ".5"

    mf = dai.MF(sg_FactorGraph, mfopts)
    # Initialize mean field algorithm
    mf.init()
    # Run mean field algorithm
    mf.run()

    # Report log partition sum of sg_FactorGraph, approximated by the mean field algorithm
    ln_z_estimate = mf.logZ()
    return ln_z_estimate
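# Example usage (illustrative sketch; `sg_model` assumed as above). Passing
# maxiter=None falls back to the module-level LIBDAI_MEAN_FIELD_ITERS default.
#
#     ln_z_mf = run_mean_field(sg_model, maxiter=None)
#     print('Mean field ln(Z) estimate:', ln_z_mf)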
def run(self, method_name, pseudo_count=0.1, **kwds):
    self.dai_factor_graph.setup_factor_graph()

    # Build the evidence: one Observation per sample, mapping dai variables to observed values
    obsvec = dai.ObservationVec()
    for sample in self.evidence_map:
        obs = dai.Observation()
        for var in self.evidence_map[sample]:
            # obs[self.factor_graph.var_map[var].dai_variable] = self.evidence_map[sample][var]
            # obs[var.get_dai_value()] = self.evidence_map[sample][var]
            obs[self.dai_factor_graph.dai_variables[var]] = self.evidence_map[sample][var]
        obsvec.append(obs)

    # Build the shared-parameter specification for the maximization step
    sp_vec = dai.VecSharedParameters()
    for sp in self.shared_param_list:
        fo = dai.FactorOrientations()
        for variable_list, factor in sp.shared_list:
            vec = dai.VecVar()
            for v in variable_list:
                vec.append(self.dai_factor_graph.dai_variables[v])
            fo[factor.factor_id] = vec
        total_dim = 1
        for d in sp.variable_dims:
            total_dim *= d
        # props = dai.PropertySet()
        # props["total_dim"] = str(total_dim)
        # props["target_dim"] = str(sp.variable_dims[0])
        # pe = dai.ParameterEstimation.construct("CondProbEstimation", props)
        pseudo_counts = dai.Prob(total_dim, pseudo_count)
        pe = dai.CondProbEstimation(sp.variable_dims[0], pseudo_counts)
        dai_sp = dai.SharedParameters(fo, pe)
        sp_vec.append(dai_sp)

    max_step = dai.MaximizationStep(sp_vec)
    vec_max_step = dai.VecMaximizationStep()
    vec_max_step.append(max_step)
    evidence = dai.Evidence(obsvec)

    # Construct the inference algorithm used inside EM
    # inf_alg = self.factor_graph.get_inf_bp(verbose=True)
    prop = dai.PropertySet()
    for k in kwds:
        prop[k] = str(kwds[k])
    inf_alg = dai.newInfAlg(method_name, self.dai_factor_graph.get_factor_graph(), prop)

    em_props = dai.PropertySet()
    dai_em_alg = dai.EMAlg(evidence, inf_alg, vec_max_step, em_props)

    # Iterate EM until libDAI's termination conditions are satisfied
    while not dai_em_alg.hasSatisfiedTermConditions():
        likelihood = dai_em_alg.iterate()
        if kwds.get("verbose", False):
            print("Iteration", dai_em_alg.Iterations(), "likelihood:", likelihood)
            for max_step in dai_em_alg:
                print(max_step, len(max_step))
                for shared_param in max_step:
                    print("\t", shared_param.currentExpectations())
                    print("\t", shared_param.getPEst().estimate(
                        shared_param.currentExpectations()))

    # Store the estimated parameter vectors back on the shared-parameter objects
    for i in range(len(dai_em_alg[0])):
        shared_param = dai_em_alg[0][i]
        param_vec = shared_param.getPEst().estimate(
            shared_param.currentExpectations())
        var_list = self.shared_param_list[i].variable_set_labels
        var_dims = self.shared_param_list[i].variable_dims
        self.shared_param_list[i].result = param_vec
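# Example usage (illustrative sketch; the class name `EMRunner` and its
# constructor arguments are hypothetical stand-ins for however this EM wrapper
# is instantiated elsewhere in the codebase):
#
#     em = EMRunner(dai_factor_graph, evidence_map, shared_param_list)
#     em.run("BP", pseudo_count=0.1, maxiter=1000, tol=1e-9,
#            updates="SEQRND", logdomain=1, verbose=True)
#     for sp in em.shared_param_list:
#         print(sp.result)  # estimated parameter vector per shared parameter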
def run_inference(sg_model):
    sg_FactorGraph = build_libdaiFactorGraph_from_SpinGlassModel(
        sg_model, fixed_variables={})
    # sg_FactorGraph = build_graph_from_clique_ising_model(sg_model, fixed_variables={})

    # Write factorgraph to a file
    sg_FactorGraph.WriteToFile('sg_temp.fg')
    print('spin glass factor graph written to sg_temp.fg')

    # Output some information about the factorgraph
    print(sg_FactorGraph.nrVars(), 'variables')
    print(sg_FactorGraph.nrFactors(), 'factors')

    # Set some constants
    maxiter = 10000
    tol = 1e-9
    verb = 1

    # Store the constants in a PropertySet object
    opts = dai.PropertySet()
    opts["maxiter"] = str(maxiter)  # Maximum number of iterations
    opts["tol"] = str(tol)  # Tolerance for convergence
    opts["verbose"] = str(verb)  # Verbosity (amount of output generated)

    ##################### Run Loopy Belief Propagation #####################
    print()
    print('-' * 80)
    # Construct a BP (belief propagation) object from the FactorGraph sg_FactorGraph
    # using the parameters specified by opts and two additional properties,
    # specifying the type of updates the BP algorithm should perform and
    # whether they should be done in the real or in the log domain
    bpopts = opts
    bpopts["updates"] = "SEQRND"
    bpopts["logdomain"] = "1"

    bp = dai.BP(sg_FactorGraph, bpopts)
    # Initialize belief propagation algorithm
    bp.init()
    # Run belief propagation algorithm
    bp.run()

    # Report log partition sum of sg_FactorGraph, approximated by the belief propagation algorithm
    print('Approximate (loopy belief propagation) log partition sum:', bp.logZ())

    ##################### Run Tree Re-weighted Belief Propagation #####################
    print()
    print('-' * 80)
    # Construct a TRWBP (tree re-weighted belief propagation) object from the
    # FactorGraph sg_FactorGraph using the parameters specified by opts and
    # additional properties for the update type, number of trees, and log domain
    trwbp_opts = opts
    trwbp_opts["updates"] = "SEQRND"
    trwbp_opts["nrtrees"] = "10"
    trwbp_opts["logdomain"] = "1"

    trwbp = dai.TRWBP(sg_FactorGraph, trwbp_opts)
    # trwbp = dai.FBP(sg_FactorGraph, trwbp_opts)

    # Initialize belief propagation algorithm
    trwbp.init()
    # Run belief propagation algorithm
    t0 = time.time()
    trwbp.run()
    t1 = time.time()

    # Report log partition sum of sg_FactorGraph, approximated by the belief propagation algorithm
    print('Approximate (tree re-weighted belief propagation) log partition sum:',
          trwbp.logZ())
    print('time =', t1 - t0)

    ##################### Run Junction Tree Algorithm #####################
    print()
    print('-' * 80)
    # Construct a JTree (junction tree) object from the FactorGraph sg_FactorGraph
    # using the parameters specified by opts and an additional property
    # that specifies the type of updates the JTree algorithm should perform
    jtopts = opts
    jtopts["updates"] = "HUGIN"
    jt = dai.JTree(sg_FactorGraph, jtopts)
    # Initialize junction tree algorithm
    jt.init()
    # Run junction tree algorithm
    jt.run()

    # Construct another JTree (junction tree) object that is used to calculate
    # the joint configuration of variables that has maximum probability (MAP state)
    jtmapopts = opts
    jtmapopts["updates"] = "HUGIN"
    jtmapopts["inference"] = "MAXPROD"
    jtmap = dai.JTree(sg_FactorGraph, jtmapopts)
    # Initialize junction tree algorithm
    jtmap.init()
    # Run junction tree algorithm
    jtmap.run()
    # Calculate joint state of all variables that has maximum probability
    jtmapstate = jtmap.findMaximum()

    # Report log partition sum (normalizing constant) of sg_FactorGraph,
    # calculated by the junction tree algorithm
    print()
    print('-' * 80)
    print('Exact log partition sum:', jt.logZ())
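# Example usage (illustrative sketch; `sg_model` assumed as above):
#
#     run_inference(sg_model)  # prints LBP, TRW-BP, and exact junction-tree ln(Z) values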
def junction_tree(sg_model, verbose=False):
    '''
    Calculate the exact partition function of a spin glass model using the junction tree algorithm

    Inputs:
    - sg_model (SpinGlassModel)

    Outputs:
    - ln_Z: natural logarithm of the exact partition function
    '''
    sg_FactorGraph = build_libdaiFactorGraph_from_SpinGlassModel(
        sg_model, fixed_variables={})
    # sg_FactorGraph = build_graph_from_clique_ising_model(sg_model, fixed_variables={})

    # Write factorgraph to a file
    sg_FactorGraph.WriteToFile('sg_temp.fg')
    if verbose:
        print('spin glass factor graph written to sg_temp.fg')

    # Output some information about the factorgraph
    if verbose:
        print(sg_FactorGraph.nrVars(), 'variables')
        print(sg_FactorGraph.nrFactors(), 'factors')

    # Set some constants
    maxiter = 10000
    tol = 1e-9
    verb = 0

    # Store the constants in a PropertySet object
    opts = dai.PropertySet()
    opts["maxiter"] = str(maxiter)  # Maximum number of iterations
    opts["tol"] = str(tol)  # Tolerance for convergence
    opts["verbose"] = str(verb)  # Verbosity (amount of output generated)

    ##################### Run Junction Tree Algorithm #####################
    # Construct a JTree (junction tree) object from the FactorGraph sg_FactorGraph
    # using the parameters specified by opts and an additional property
    # that specifies the type of updates the JTree algorithm should perform
    jtopts = opts
    jtopts["updates"] = "HUGIN"
    jt = dai.JTree(sg_FactorGraph, jtopts)
    # Initialize junction tree algorithm
    jt.init()
    # Run junction tree algorithm
    jt.run()

    # Construct another JTree (junction tree) object that is used to calculate
    # the joint configuration of variables that has maximum probability (MAP state)
    jtmapopts = opts
    jtmapopts["updates"] = "HUGIN"
    jtmapopts["inference"] = "MAXPROD"
    jtmap = dai.JTree(sg_FactorGraph, jtmapopts)
    # Initialize junction tree algorithm
    jtmap.init()
    # Run junction tree algorithm
    jtmap.run()
    # Calculate joint state of all variables that has maximum probability
    jtmapstate = jtmap.findMaximum()

    ln_Z = jt.logZ()
    # Report log partition sum (normalizing constant) of sg_FactorGraph, calculated by the junction tree algorithm
    if verbose:
        print()
        print('-' * 80)
        print('Exact log partition sum:', ln_Z)
    return ln_Z
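# Example usage (illustrative sketch; `sg_model` assumed as above): compare an
# approximate ln(Z) estimate against the exact junction tree value.
#
#     exact_ln_z = junction_tree(sg_model)
#     lbp_ln_z = run_loopyBP(sg_model, maxiter=None)
#     print('LBP error in ln(Z):', lbp_ln_z - exact_ln_z)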
# TODO: this crashes
# Read FactorGraph from the file specified by the first command line argument
fg = dai.FactorGraph()
fg.ReadFromFile(sys.argv[1])
maxstates = 1000000
if len(sys.argv) == 3:
    maxstates = int(sys.argv[2])

# Set some constants
maxiter = 10000
tol = 1e-9
verb = 1

# Store the constants in a PropertySet object
opts = dai.PropertySet()
opts["maxiter"] = str(maxiter)  # Maximum number of iterations
opts["tol"] = str(tol)  # Tolerance for convergence
opts["verbose"] = str(verb)  # Verbosity (amount of output generated)

# Bound treewidth for junction tree
do_jt = True
# TODO: port the corresponding C++ treewidth check:
# try {
#     boundTreewidth(fg, &eliminationCost_MinFill, maxstates);
# } catch( Exception &e ) {
#     if( e.getCode() == Exception::OUT_OF_MEMORY ) {
#         do_jt = false;
#         cout << "Skipping junction tree (need more than " << maxstates << " states)." << endl;
#     }
#     else