Ejemplo n.º 1
0
 def setUp(self):
     """Build a dynamic discrete Bayesian network fixture from the dynamic test dict."""
     # Graph structure: load and topologically sort the skeleton.
     self.skel = GraphSkeleton()
     self.skel.load("unittestdyndict.txt")
     self.skel.toporder()
     # CPD parameters for each node come from the same file.
     self.nd = NodeData()
     self.nd.load("unittestdyndict.txt")
     self.d = DynDiscBayesianNetwork(self.skel, self.nd)
Ejemplo n.º 2
0
 def set_bayesnet(self):
     """Load the discrete Bayesian network named by ``self.file`` into ``self.bn``."""
     skel = GraphSkeleton()
     skel.load(self.file)
     skel.toporder()  # libpgm expects a topologically ordered skeleton
     nd = NodeData()
     nd.load(self.file)
     self.bn = DiscreteBayesianNetwork(skel, nd)
Ejemplo n.º 3
0
 def setUp(self):
     """Create the DiscreteBayesianNetwork instance under test."""
     params = NodeData()
     params.load("unittestdict.txt")
     skeleton = GraphSkeleton()
     skeleton.load("unittestdict.txt")
     skeleton.toporder()
     self.instance = DiscreteBayesianNetwork(skeleton, params)
Ejemplo n.º 4
0
    def setUp(self):
        """Build the linear-Gaussian Bayesian network fixture.

        NOTE(review): node data is read from unittestlgdict.txt while the
        skeleton comes from unittestdict.txt — presumably the two files share
        node names; confirm the mismatch is intentional.
        """
        skeleton = GraphSkeleton()
        skeleton.load("unittestdict.txt")
        skeleton.toporder()

        params = NodeData()
        params.load("unittestlgdict.txt")
        self.lgb = LGBayesianNetwork(skeleton, params)
Ejemplo n.º 5
0
 def setUp(self):
     """Create a discrete BN plus a table-CPD factorization over it."""
     params = NodeData()
     params.load("unittestdict.txt")
     skeleton = GraphSkeleton()
     skeleton.load("unittestdict.txt")
     skeleton.toporder()
     self.bn = DiscreteBayesianNetwork(skeleton, params)
     self.fn = TableCPDFactorization(self.bn)
Ejemplo n.º 6
0
 def setUp(self):
     """Build a hybrid BN from hybrid node data and the discrete skeleton."""
     self.skel = GraphSkeleton()
     self.skel.load("unittestdict.txt")
     self.skel.toporder()
     self.nd = NodeData()
     self.nd.load("unittesthdict.txt")
     self.nd.entriestoinstances()  # convert raw Vdata entries into node instances
     self.hybn = HyBayesianNetwork(self.skel, self.nd)
Ejemplo n.º 7
0
 def setUp(self):
     """Create a discrete BN and two table-CPD factors for it."""
     params = NodeData()
     params.load("unittestdict.txt")
     skeleton = GraphSkeleton()
     skeleton.load("unittestdict.txt")
     skeleton.toporder()
     self.instance = DiscreteBayesianNetwork(skeleton, params)
     self.factor = TableCPDFactor("Grade", self.instance)
     self.factor2 = TableCPDFactor("Letter", self.instance)
Ejemplo n.º 8
0
def getTableCPD(jsonpath=""):
    """Load a discrete Bayesian network and return its table-CPD factorization.

    Args:
        jsonpath: path of the libpgm input file holding both the node data
            and the graph skeleton. Defaults to "" for backward
            compatibility with the original, but callers should pass a real
            path — loading an empty path will fail.

    Returns:
        TableCPDFactorization wrapping the loaded network.
    """
    nd = NodeData()
    skel = GraphSkeleton()
    nd.load(jsonpath)
    skel.load(jsonpath)
    # NOTE(review): other examples in this file call skel.toporder() before
    # constructing the network — confirm whether it is needed here too.
    bn = DiscreteBayesianNetwork(skel, nd)
    tablecpd = TableCPDFactorization(bn)
    return tablecpd
def getTableCPD(jsonpath="./graph/graph_example.txt"):
    """Load the example graph and return its table-CPD factorization.

    Args:
        jsonpath: path of the libpgm input file holding both the node data
            and the graph skeleton. Defaults to the original hard-coded
            example path, so existing callers are unaffected.

    Returns:
        TableCPDFactorization wrapping the loaded Bayesian network.
    """
    nd = NodeData()
    skel = GraphSkeleton()
    nd.load(jsonpath)
    skel.load(jsonpath)
    # load Bayesian network
    bn = DiscreteBayesianNetwork(skel, nd)
    tablecpd = TableCPDFactorization(bn)
    return tablecpd
def q_without_ros():
    """Monty Hall example: query P(prize_door | guest picked A, Monty opened B).

    Builds the three-node Monty Hall network entirely in code (no input
    file), runs a conditional-probability query, and prints the resulting
    factor's values, scope, cardinalities and strides.
    """
    skel = GraphSkeleton()
    skel.V = ["prize_door", "guest_door", "monty_door"]
    skel.E = [["prize_door", "monty_door"], ["guest_door", "monty_door"]]
    skel.toporder()
    nd = NodeData()
    # CPDs: prize and guest doors are uniform; Monty's door depends on both.
    nd.Vdata = {
        "prize_door": {
            "numoutcomes": 3,
            "parents": None,
            "children": ["monty_door"],
            "vals": ["A", "B", "C"],
            "cprob": [1.0 / 3, 1.0 / 3, 1.0 / 3],
        },
        "guest_door": {
            "numoutcomes": 3,
            "parents": None,
            "children": ["monty_door"],
            "vals": ["A", "B", "C"],
            "cprob": [1.0 / 3, 1.0 / 3, 1.0 / 3],
        },
        "monty_door": {
            "numoutcomes": 3,
            "parents": ["prize_door", "guest_door"],
            "children": None,
            "vals": ["A", "B", "C"],
            # Keys are stringified [prize, guest] assignments.
            "cprob": {
                "['A', 'A']": [0., 0.5, 0.5],
                "['B', 'B']": [0.5, 0., 0.5],
                "['C', 'C']": [0.5, 0.5, 0.],
                "['A', 'B']": [0., 0., 1.],
                "['A', 'C']": [0., 1., 0.],
                "['B', 'A']": [0., 0., 1.],
                "['B', 'C']": [1., 0., 0.],
                "['C', 'A']": [0., 1., 0.],
                "['C', 'B']": [1., 0., 0.],
            },
        },
    }
    bn = DiscreteBayesianNetwork(skel, nd)
    fn = TableCPDFactorization(bn)

    query = {
        "prize_door": ["A", "B", "C"],
    }
    evidence = {
        "guest_door": "A",
        "monty_door": "B",
    }

    res = fn.condprobve(query, evidence)
    # Parenthesised single-argument print is valid on both Python 2 and 3
    # (the original bare print statements are a SyntaxError on Python 3).
    print(res.vals)
    print(res.scope)
    print(res.card)
    print(res.stride)
Ejemplo n.º 11
0
    def load(self, file_name):
        """Initialise this network from a single libpgm input file.

        The file supplies both the node data (CPDs) and the graph
        skeleton; the skeleton is topologically ordered before the parent
        constructor is invoked.
        """
        node_data = NodeData()
        node_data.load(file_name)  # any input file

        skeleton = GraphSkeleton()
        skeleton.load(file_name)
        skeleton.toporder()  # topologically order the graph skeleton

        super(DiscreteBayesianNetworkExt, self).__init__(skeleton, node_data)
Ejemplo n.º 12
0
 def test_query(self):
     """Querying Grade with weak-Letter evidence yields a single Grade node."""
     params = NodeData()
     params.load(self.teacher_data_path)
     request = DiscreteQueryRequest()
     request.nodes = U.discrete_nodes_to_ros(params.Vdata)
     request.evidence = [DiscreteNodeState("Letter", "weak")]
     request.query = ["Grade"]
     response = self.query(request)
     self.assertEqual(len(response.nodes), 1)
     grade_node = response.nodes[0]
     self.assertEqual(grade_node.name, "Grade")
     self.assertListEqual(['A', 'B', 'C'], grade_node.outcomes)
Ejemplo n.º 13
0
    def setUp(self):
        """Create a learner plus sampled discrete and linear-Gaussian data."""
        # instantiate learner
        self.l = PGMLearner()

        # Shared graph skeleton, topologically ordered.
        skeleton = GraphSkeleton()
        skeleton.load("unittestdict.txt")
        skeleton.toporder()

        # Discrete network and a 5000-point sample drawn from it.
        disc_params = NodeData()
        disc_params.load("unittestdict.txt")
        self.samplediscbn = DiscreteBayesianNetwork(skeleton, disc_params)
        self.samplediscseq = self.samplediscbn.randomsample(5000)

        # Linear-Gaussian network and a 10000-point sample drawn from it.
        lg_params = NodeData()
        lg_params.load("unittestlgdict.txt")
        self.samplelgbn = LGBayesianNetwork(skeleton, lg_params)
        self.samplelgseq = self.samplelgbn.randomsample(10000)

        self.skel = skeleton
Ejemplo n.º 14
0
def loadbn(param_file):
    """Load a discrete Bayesian network from its associated .txt file.

    Args:
        param_file: base name (without extension) of the parameter file
            under ``<experiment_dir>/parameters``.

    Returns:
        The loaded DiscreteBayesianNetwork.
    """
    file_path = os.path.join(experiment_dir, 'parameters', param_file + '.txt')

    skeleton = GraphSkeleton()
    skeleton.load(file_path)
    skeleton.toporder()
    params = NodeData()
    params.load(file_path)
    return DiscreteBayesianNetwork(skeleton, params)
Ejemplo n.º 15
0
 def setUp(self):
     """Collect aggregated random samples and Gibbs samples from a BN."""
     skeleton = GraphSkeleton()
     skeleton.load("unittestdict.txt")
     skeleton.toporder()
     params = NodeData()
     params.load("unittestdict.txt")
     self.bn = DiscreteBayesianNetwork(skeleton, params)
     aggregator = SampleAggregator()
     # Forward (ancestral) sampling: 50 samples.
     aggregator.aggregate(self.bn.randomsample(50))
     self.rseq = aggregator.seq
     self.ravg = aggregator.avg
     # Gibbs sampling conditioned on a weak Letter: 51 samples.
     self.fn = TableCPDFactorization(self.bn)
     aggregator.aggregate(self.fn.gibbssample({'Letter': 'weak'}, 51))
     self.gseq = aggregator.seq
     self.gavg = aggregator.avg
Ejemplo n.º 16
0
 def construct(self):
     """Build a DiscreteBayesianNetwork from ``self.nodes``.

     Side effects on ``self.nodes``: appends each node to its parents'
     'children' lists, and replaces empty 'children' lists with None
     (the form libpgm expects for leaf nodes).

     Returns:
         The constructed DiscreteBayesianNetwork.
     """
     skel = GraphSkeleton()
     # list(...) so the skeleton owns a real list on Python 3 (dict.keys()
     # is a view there); identical behaviour on Python 2.
     skel.V = list(self.nodes.keys())
     skel.E = []
     # .items() replaces the Python-2-only .iteritems() — same iteration
     # on Python 2, required for Python 3.
     for node, ndata in self.nodes.items():
         if ndata['parents']:
             for p in ndata['parents']:
                 skel.E.append([p, node])
                 self.nodes[p]['children'].append(node)
     for node, ndata in self.nodes.items():
         if len(ndata['children']) == 0:
             ndata['children'] = None
     data = NodeData()
     data.Vdata = self.nodes
     skel.toporder()
     bn = DiscreteBayesianNetwork(skel, data)
     return bn
Ejemplo n.º 17
0
def net2():
    """Learn a linear-Gaussian BN from data and print a probability score.

    Returns:
        0 on completion (kept for backward compatibility).
    """
    nd = NodeData()
    skel = GraphSkeleton()
    nd.load("net.txt")  # single input file holds both parameters and structure
    skel.load("net.txt")

    # topologically order graphskeleton
    skel.toporder()

    # Reference network loaded from file — not used below; kept for parity
    # with the original example.
    lgbn = LGBayesianNetwork(skel, nd)

    in_data = read_data.getdata2()
    learner = PGMLearner()
    bn = learner.lg_mle_estimateparams(skel, in_data)

    p = cal_prob(in_data[300:500], bn)
    # Parenthesised single-argument print is valid on both Python 2 and 3
    # (the original bare print statement is a SyntaxError on Python 3).
    print(p)
    return 0
Ejemplo n.º 18
0
    def test_structure_estimation(self):
        """Structure estimation over 8000 samples recovers a 5-node graph."""
        request = DiscreteStructureEstimationRequest()

        # Teacher network to draw samples from.
        skeleton = GraphSkeleton()
        skeleton.load(self.data_path)
        skeleton.toporder()
        params = NodeData()
        params.load(self.teacher_data_path)
        network = DiscreteBayesianNetwork(skeleton, params)

        # Pack each sample into a graph-state message.
        for sample in network.randomsample(8000):
            state = DiscreteGraphState()
            for node_name, node_value in sample.items():
                state.node_states.append(
                    DiscreteNodeState(node=node_name, state=node_value))
            request.states.append(state)

        response = self.struct_estimate(request)
        self.assertIsNotNone(response.graph)
        self.assertEqual(len(response.graph.nodes), 5)
        self.assertGreater(len(response.graph.edges), 0)
Ejemplo n.º 19
0
def main():
    """Train a discrete BN with EM on ~70% of the data and report accuracy."""
    in_data = read_data.getdata()
    f_data = format_data(in_data)
    nd = NodeData()
    nd.load("net4.txt")  # single input file with parameters and structure
    skel = GraphSkeleton()
    skel.load("net4.txt")
    skel.toporder()
    bn = DiscreteBayesianNetwork(skel, nd)

    # training dataset: 70%
    bn2 = em(f_data[1:6000], bn, skel)

    pr_training = precision(f_data[1:6000], bn2)

    # %-formatted single-argument prints work on both Python 2 and 3
    # (the original multi-argument print statements are Python-2-only)
    # and produce the same "label: value" line.
    print("Prediction accuracy for training data: %s" % pr_training[1])

    # testing dataset: 30% (held-out slice)
    pr = precision(f_data[6700:6800], bn2)
    print("Prediction accuracy for test data: %s" % pr[1])
Ejemplo n.º 20
0
    def test_param_estimation(self):
        """Parameter estimation from 200 samples returns all 5 nodes."""
        request = DiscreteParameterEstimationRequest()

        # Describe the known graph structure in the request.
        skeleton = GraphSkeleton()
        skeleton.load(self.data_path)
        request.graph.nodes = skeleton.V
        request.graph.edges = [GraphEdge(tail, head) for tail, head in skeleton.E]
        skeleton.toporder()

        # Draw trial data from the teacher network.
        teacher_params = NodeData()
        teacher_params.load(self.teacher_data_path)
        teacher_bn = DiscreteBayesianNetwork(skeleton, teacher_params)
        for sample in teacher_bn.randomsample(200):
            state = DiscreteGraphState()
            for node_name, node_value in sample.items():
                state.node_states.append(
                    DiscreteNodeState(node=node_name, state=node_value))
            request.states.append(state)

        self.assertEqual(len(self.param_estimate(request).nodes), 5)
Ejemplo n.º 21
0
Archivo: pgm.py Proyecto: ml4ai/b3
 def setup(self):
     """Translate ``self.node`` into libpgm structures and build the BN.

     Each node contributes a vertex, its incoming parent edges, and a
     Vdata entry (outcome count, values, CPT, parent list).
     """
     self.nd = NodeData()
     self.skel = GraphSkeleton()
     self.skel.V, self.skel.E = [], []
     self.nd.Vdata = {}
     for index, node in enumerate(self.node.values()):
         node.sId = str(index)
         entry = {}
         entry["numoutcomes"] = len(node.values)
         entry["vals"] = node.values
         entry["cprob"] = node.cpt
         self.skel.V.append(node.name)
         parent_names = []
         for parent in node.parents:
             # kept as == for exact parity with the original; likely
             # should be `is None`
             if parent == None: continue
             parent_names.append(parent.name)
             self.skel.E.append([parent.name, node.name])
         # libpgm expects None (not an empty list) for root nodes.
         entry["parents"] = parent_names if len(parent_names) > 0 else None
         self.nd.Vdata[node.name] = entry
     self.skel.toporder()
     self.bn = DiscreteBayesianNetwork(self.skel, self.nd)
     self.fn = TableCPDFactorization(self.bn)
Ejemplo n.º 22
0
 def setUp(self):
     # Minimal fixture: this test only needs an (unloaded) NodeData container.
     self.nd = NodeData()
Ejemplo n.º 23
0
    def __init__(self, nodes):
        """Build a discrete Bayesian network from a node-spec dictionary.

        Args:
            nodes: mapping of node name -> spec dict with a "type" key
                ("inferred", "fsm_input", "sensor_input" or "output");
                inferred nodes also carry "parents" and a truth table "p".
        """
        self.nodes = {}

        self.children = defaultdict(list)
        self.parents = defaultdict(list)
        self.outputs = {}
        # .items() replaces the Python-2-only .iteritems(): identical
        # behaviour on Python 2, required on Python 3.
        for name, node_spec in nodes.items():
            node_type = node_spec["type"]
            if node_type == "inferred":
                parents = node_spec["parents"]
                # store the relationship between these elements
                for parent in parents:
                    normalised = normalise_name(parent)
                    self.parents[name].append(normalised)
                    self.children[normalised].append(name)
                truth_table = parse_truth_table(node_spec["p"], parents)
                node = make_node(truth_table, parents, node_type)
                self.nodes[name] = node

            if node_type == "fsm_input":
                node = make_node([1.0, 0.0], None, node_type)
                self.nodes[name] = node

            if node_type == "sensor_input":
                # Sensor nodes get a hidden proxy parent so their evidence
                # can be injected through a CPD.
                proxy_node = make_node([1.0, 0.0], None, "proxy")
                proxy_name = "_proxy_%s" % name
                self.nodes[proxy_name] = proxy_node
                self.children[proxy_name].append(name)
                node = make_node({
                    "['T']": [1.0, 0.0],
                    "['F']": [0.0, 1.0]
                }, [proxy_name], node_type)
                self.nodes[name] = node
            if node_type == "output":
                self.outputs[name] = node_spec

        # libpgm expects None (not []) for childless nodes.
        for node in self.nodes:
            if len(self.children[node]) > 0:
                self.nodes[node]["children"] = self.children[node]
            else:
                self.nodes[node]["children"] = None

        # certainty scaling
        self.event_caution = 0.0

        og = OrderedSkeleton()
        # list(...) so the skeleton owns a real list on Python 3, where
        # dict.keys() is a live view.
        og.V = list(self.nodes.keys())
        edges = []
        for parent_name, child_names in self.children.items():
            for child in child_names:
                edges.append((parent_name, child))

        og.E = edges
        og.toporder()

        nd = NodeData()
        nd.Vdata = self.nodes

        #logging.debug(pprint.pformat(nd.Vdata))

        self.net = DiscreteBayesianNetwork(og, nd)
        self.factor_net = TableCPDFactorization(self.net)
Ejemplo n.º 24
0
import json

from libpgm.nodedata import NodeData
from libpgm.graphskeleton import GraphSkeleton
from libpgm.lgbayesiannetwork import LGBayesianNetwork
from libpgm.pgmlearner import PGMLearner

# Generate some data to use: sample 8000 points from a linear-Gaussian BN
# whose parameters and structure both come from gaussGrades.txt.
nd = NodeData()
nd.load("gaussGrades.txt")  # an input file
skel = GraphSkeleton()
skel.load("gaussGrades.txt")
skel.toporder()
lgbn = LGBayesianNetwork(skel, nd)
data = lgbn.randomsample(8000)

# Parenthesised single-argument print is valid on both Python 2 and 3
# (the original bare print statements are a SyntaxError on Python 3).
print(data)

# instantiate my learner
learner = PGMLearner()

# estimate structure from the sampled data alone
result = learner.lg_constraint_estimatestruct(data)

# output the learned edge list
print(json.dumps(result.E, indent=2))
Ejemplo n.º 25
0
def discrete_nodedata_from_ros(nodes):
    """Convert ROS discrete-node messages into a libpgm NodeData object.

    Args:
        nodes: iterable of ROS node messages, each exposing a ``name``
            attribute.

    Returns:
        NodeData whose ``Vdata`` maps each node name to its dict form.
    """
    nd = NodeData()
    nd.Vdata = {n.name: dict_from_ros_discrete_node(n) for n in nodes}
    return nd
    # NOTE(review): the original continued with an unreachable block of
    # script code after this return (ROS service setup and a parameter-
    # estimation request, apparently pasted from another example) —
    # removed here as dead code.
Ejemplo n.º 27
0
# Allowed values for the location and activity variables.
locatValsList = ["Idle", "Bed", "Hall", "Both"]
activValsList = ["Away", "Sleeping", "Wandering", "Reading", "Diverse"]
# Full vocabulary of valid input tokens. NOTE(review): wkdayValsList and
# hourValsList are presumably defined earlier in the file — not visible here.
dictionary = set().union(wkdayValsList, hourValsList, locatValsList,
                         activValsList)

# checking if input from user was appropriate
if set(userinput).issubset(dictionary):
    # initializing probabilities lists
    wkdayProbList = []
    hourProbList = []
    locatProbList = []
    activProbList = []

    # INITIALIZING BN 1
    # load nodedata and graphskeleton (path_bn1 defined elsewhere in file)
    nd1 = NodeData()
    skel1 = GraphSkeleton()
    nd1.load(path_bn1)
    skel1.load(path_bn1)
    skel1.toporder()  # toporder graph skeleton

    # INITIALIZING BN 2
    # load nodedata and graphskeleton (path_bn2 defined elsewhere in file)
    nd2 = NodeData()
    skel2 = GraphSkeleton()
    nd2.load(path_bn2)
    skel2.load(path_bn2)
    skel2.toporder()  # toporder graph skeleton

    # FINDING NEXT ACTIVITY ATTRIBUTES THROUGH INFERENCE ON BN 1
    # wkday variable query (body continues beyond this excerpt)
from libpgm.nodedata import NodeData
from libpgm.graphskeleton import GraphSkeleton
from libpgm.discretebayesiannetwork import DiscreteBayesianNetwork

from inference.exact_inference import ExactInferenceEngine
from inference.approximate_inference import ApproximateInferenceEngine

# Load the burglary/alarm network and compare inference engines on the
# classic query P(Burglary | MaryCalls = true, JohnCalls = true).
node_data = NodeData()
network_skeleton = GraphSkeleton()
node_data.load('test_bayesian_networks/network.txt')
network_skeleton.load('test_bayesian_networks/network.txt')
network = DiscreteBayesianNetwork(network_skeleton, node_data)

exact_inference_engine = ExactInferenceEngine(network)
approximate_inference_engine = ApproximateInferenceEngine(network)

query_variable = 'Burglary'
evidence_variables = {'MaryCalls': 'true', 'JohnCalls': 'true'}
resulting_distribution = exact_inference_engine.perform_inference(
    query_variable, evidence_variables)
# %-formatted single-argument prints work on both Python 2 and 3 (the
# original multi-argument print statements are Python-2-only). The double
# space matches the original py2 output: label already ends with a space
# and the print-comma added another.
print('P(B|m,j) - enumeration:  %s' % (resulting_distribution,))
resulting_distribution = exact_inference_engine.perform_ve_inference(
    query_variable, evidence_variables)
print('(B|m,j) - variable elimination:  %s' % (resulting_distribution,))
resulting_distribution = approximate_inference_engine.perform_rs_inference(
    query_variable, evidence_variables, 100000)
print('P(B|m,j) - approximate - rejection sampling:  %s' % (resulting_distribution,))
resulting_distribution = approximate_inference_engine.perform_lw_inference(
    query_variable, evidence_variables, 100000)
print('P(B|m,j) - approximate - likelihood weighting:  %s' % (resulting_distribution,))
resulting_distribution = approximate_inference_engine.perform_gibbs_inference(