Example #1
def marginal_ve_e(bn, target, evidence={}):
    """
    Perform Sum-Product Variable Elimination on
    a discrete Bayesian network.

    Arguments
    ---------
    *bn* : a BayesNet object

    *target* : the target RV (a single variable name)

    *evidence* : a dictionary, where
        key = rv and value = rv value

    Returns
    -------
    *marginal* : a numpy array containing the target's
        marginal conditional probability distribution,
        rounded to 4 decimal places.

    Notes
    -----
    - Multiple pieces of evidence often return "nan"...numbers too small?
        - dividing by zero -> perturb values in the Factor class
    """
    _phi = Factorization(bn)

    order = copy(list(bn.nodes()))
    order.remove(target)

    #### EVIDENCE PROCESSING ####
    for E, e in evidence.items():
        _phi -= (E, e)
        order.remove(E)

    #### SUM-PRODUCT ELIMINATE VAR ####
    for var in order:
        _phi /= var

    # multiply phi's together if there is evidence
    final_phi = _phi.consolidate()

    return np.round(final_phi.cpt, 4)
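The operator overloads used above hide the actual arithmetic: evidence processing reduces each factor to the observed slice, the sum-product step multiplies in and then sums out one variable at a time, and consolidate() multiplies whatever factors remain. The following standalone numpy sketch is not part of the library and uses made-up CPTs; it only mirrors, by hand, what those Factorization operators do conceptually for a tiny chain A -> B -> C.

import numpy as np

# Made-up CPTs for a binary chain A -> B -> C.
p_a = np.array([0.6, 0.4])                     # P(A)
p_b_given_a = np.array([[0.7, 0.3],            # P(B | A), rows indexed by A
                        [0.2, 0.8]])
p_c_given_b = np.array([[0.9, 0.1],            # P(C | B), rows indexed by B
                        [0.3, 0.7]])

# Query P(C | A=1).  Evidence processing: reduce every factor that mentions A
# to the slice A=1 (conceptually what `_phi -= (E, e)` does).
phi_b = p_a[1] * p_b_given_a[1, :]             # unnormalized factor over B

# Sum-product eliminate B: multiply in the factor that mentions B, then sum B
# out (conceptually what `_phi /= var` does).
phi_c = (phi_b[:, None] * p_c_given_b).sum(axis=0)

# Consolidate: normalize the remaining factor over the target.
posterior_c = phi_c / phi_c.sum()
print(np.round(posterior_c, 4))                # -> [0.42 0.58] for these CPTs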
Example #2
def marginal_ve_e(bn, target, evidence={}):
	"""
	Perform Sum-Product Variable Elimination on
	a discrete Bayesian network.

	Arguments
	---------
	*bn* : a BayesNet object

	*target* : the target RV (a single variable name)

	*evidence* : a dictionary, where
		key = rv and value = rv value

	Returns
	-------
	*marginal* : a numpy array containing the target's
		marginal conditional probability distribution,
		rounded to 4 decimal places.

	Notes
	-----
	- Multiple pieces of evidence often return "nan"...numbers too small?
		- dividing by zero -> perturb values in the Factor class
	"""
	_phi = Factorization(bn)

	order = copy(list(bn.nodes()))
	order.remove(target)

	#### EVIDENCE PROCESSING ####
	for E, e in evidence.items():
		_phi -= (E,e)
		order.remove(E)

	#### SUM-PRODUCT ELIMINATE VAR ####
	for var in order:
		_phi /= var

	# multiply phi's together if there is evidence
	final_phi = _phi.consolidate()

	return np.round(final_phi.cpt,4)
Example #3
def ve_map(bn, evidence={}, target=None, prob=False):
    """
    Perform Max-Product Variable Elimination over a BayesNet object
    for exact maximum a posteriori (MAP) inference.

    Arguments
    ---------
    *bn* : a BayesNet object

    *evidence* : a dictionary, where
        key = rv and value = rv value

    *target* : a single RV whose MAP value should be returned;
        if None, the full MAP assignment is returned

    *prob* : a boolean; if True, also return the (rounded)
        probability of the MAP assignment

    This has been validated with and without evidence.
    """
    _phi = Factorization(bn)

    order = copy(list(bn.nodes()))
    #### EVIDENCE PROCESSING ####
    for E, e in evidence.items():
        _phi -= (E,e)
        order.remove(E)

    #### MAX-PRODUCT ELIMINATE VAR ####
    for var in order:
        _phi //= var 
    
    #### TRACEBACK MAP ASSIGNMENT ####
    max_assignment = _phi.traceback_map()

    #### RETURN ####
    if prob:
        # multiply phi's together if there is evidence
        final_phi = _phi.consolidate()
        max_prob = round(final_phi.cpt[0],5)

        if target is not None:
            return max_prob, max_assignment[target]
        else:
            return max_prob, max_assignment
    else:
        if target is not None:
            return max_assignment[target]
        else:
            return max_assignment
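For contrast with the sum-product routine, here is another standalone sketch (again not part of the library, reusing the made-up CPTs from the earlier sum-product sketch) of the max-product elimination and traceback that ve_map performs through `//=` and traceback_map(); the exact semantics of those operators live inside the Factorization class, so this only mirrors them conceptually.

import numpy as np

p_a = np.array([0.6, 0.4])
p_b_given_a = np.array([[0.7, 0.3],
                        [0.2, 0.8]])
p_c_given_b = np.array([[0.9, 0.1],
                        [0.3, 0.7]])

# Evidence A=1, reduced exactly as in the sum-product sketch.
phi_b = p_a[1] * p_b_given_a[1, :]

# Max-product eliminate C: for every value of B keep the best C and its score,
# remembering the argmax as a traceback pointer (conceptually `_phi //= var`).
best_c_given_b = p_c_given_b.argmax(axis=1)
max_over_c = p_c_given_b.max(axis=1)

# Max-product eliminate B, then trace the pointers back (the traceback_map()
# step): pick the best B first, then the best C for that B.
scores_b = phi_b * max_over_c
map_b = int(scores_b.argmax())
map_c = int(best_c_given_b[map_b])
print({'B': map_b, 'C': map_c}, round(float(scores_b.max()), 5))
# -> {'B': 1, 'C': 1} 0.224  (the unnormalized score of that MAP assignment)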
Example #4
def ve_map(bn, evidence={}, target=None, prob=False):
    """
    Perform Max-Sum Variable Elimination over a BayesNet object
    for exact maximum a posteriori inference.

    This has been validated w/ and w/out evidence
    
    """
    _phi = Factorization(bn)

    order = copy(list(bn.nodes()))
    #### EVIDENCE PROCESSING ####
    for E, e in evidence.items():
        _phi -= (E, e)
        order.remove(E)

    #### MAX-PRODUCT ELIMINATE VAR ####
    for var in order:
        _phi //= var

    #### TRACEBACK MAP ASSIGNMENT ####
    max_assignment = _phi.traceback_map()

    #### RETURN ####
    if prob:
        # multiply phi's together if there is evidence
        final_phi = _phi.consolidate()
        max_prob = round(final_phi.cpt[0], 5)

        if target is not None:
            return max_prob, max_assignment[target]
        else:
            return max_prob, max_assignment
    else:
        if target is not None:
            return max_assignment[target]
        else:
            return max_assignment
Example #5
    def initialize_tree(self):
        """
        Initialize the structure of a clique tree, using
        the following steps:
            - Moralize graph (i.e. marry parents)
            - Triangulate graph (i.e. make graph chordal)
            - Get max cliques (i.e. community/clique detection)
            - Max spanning tree over sepset cardinality (i.e. create tree)
        
        """
        ### MORALIZE GRAPH & MAKE IT CHORDAL ###
        chordal_G = make_chordal(self.bn)  # must return a networkx object
        V = chordal_G.nodes()

        ### GET MAX CLIQUES FROM CHORDAL GRAPH ###
        C = {}  # key = vertex, value = clique object
        max_cliques = reversed(list(nx.chordal_graph_cliques(chordal_G)))
        for v_idx, clique in enumerate(max_cliques):
            C[v_idx] = Clique(set(clique))

        ### MAXIMUM SPANNING TREE OVER COMPLETE GRAPH TO MAKE A TREE ###
        weighted_edge_dict = {c_idx: {} for c_idx in range(len(C))}
        for i in range(len(C)):
            for j in range(len(C)):
                if i != j:
                    intersect_cardinality = len(C[i].sepset(C[j]))
                    # negate so that larger sepsets get smaller weights
                    # (mst() presumably minimizes total edge weight)
                    weighted_edge_dict[i][j] = -1 * intersect_cardinality
        mst_G = mst(weighted_edge_dict)
        ### SET V,E,C ###
        self.E = mst_G  # dictionary
        self.V = list(mst_G.keys())  # list
        self.C = C

        ### ASSIGN EACH FACTOR TO ONE CLIQUE ONLY ###
        v_a = dict([(rv, False) for rv in self.bn.nodes()])
        for clique in self.C.values():
            temp_scope = []
            for var in v_a:
                if not v_a[var] and set(self.bn.scope(var)).issubset(
                        clique.scope):
                    temp_scope.append(var)
                    v_a[var] = True
            clique._F = Factorization(self.bn, temp_scope)

        ### COMPUTE INITIAL POTENTIAL FOR EACH FACTOR ###
        # - i.e. multiply all of its assigned factors together
        for i, clique in self.C.items():
            if len(self.parents(i)) == 0:
                clique.is_ready = True
            clique.initialize_psi()
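A minimal, self-contained networkx sketch of the last two construction steps (maximum cliques of a chordal graph, then a sepset-weighted spanning tree) is given below. It is not part of the library: it uses a toy graph that is assumed to be already moral and chordal (so make_chordal is skipped), and it calls networkx's maximum_spanning_tree directly instead of negating the weights for a minimum spanning tree as the method above does.

import networkx as nx

# A toy graph that is already moral and chordal.
G = nx.Graph([('A', 'B'), ('B', 'C'), ('B', 'D'), ('C', 'D'), ('D', 'E')])

# Maximal cliques of the chordal graph -> one clique-tree node per clique.
cliques = [frozenset(c) for c in nx.chordal_graph_cliques(G)]

# Complete graph over the cliques, weighted by sepset cardinality; a maximum
# spanning tree over those weights yields the clique-tree edges.
clique_graph = nx.Graph()
for i, ci in enumerate(cliques):
    for j, cj in enumerate(cliques):
        if i < j:
            clique_graph.add_edge(i, j, weight=len(ci & cj))
tree = nx.maximum_spanning_tree(clique_graph)

for u, v in tree.edges():
    print(sorted(cliques[u]), '--', sorted(cliques[v]))
# e.g. ['A', 'B'] -- ['B', 'C', 'D']  and  ['B', 'C', 'D'] -- ['D', 'E']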
Example #6
    def __init__(self, bn):
        """
        Instantiate a CliqueTree object.

        Arguments
        ---------
        *bn*: a BayesNet object

        Notes
        -----
        Ideally, the Factor class would be used for the
        cliques instead of the Clique class, since the
        Clique class is just a watered-down version of it.
        
        """
        ####
        self.V = None
        self.E = None
        self.C = None
        ###

        self.bn = bn
        self._F = Factorization(bn)
        self.initialize_tree()