Example #1
 def setUp(self):
     skel = GraphSkeleton()
     skel.load("unittestdict.txt")
     skel.toporder()
     nodedata = NodeData()
     nodedata.load("unittestdict.txt")
     self.instance = DiscreteBayesianNetwork(skel, nodedata)
Example #2
 def set_bayesnet(self):
     nd = NodeData()
     skel = GraphSkeleton()
     nd.load(self.file)
     skel.load(self.file)
     skel.toporder()
     self.bn = DiscreteBayesianNetwork(skel, nd)
Example #3
 def setUp(self):
     self.nd = NodeData()
     self.nd.load("unittestdyndict.txt")
     self.skel = GraphSkeleton()
     self.skel.load("unittestdyndict.txt")
     self.skel.toporder()
     self.d = DynDiscBayesianNetwork(self.skel, self.nd)
Example #4
 def setUp(self):
     skel = GraphSkeleton()
     skel.load("unittestdict.txt")
     skel.toporder()
     nodedata = NodeData()
     nodedata.load("unittestdict.txt")
     self.instance = DiscreteBayesianNetwork(skel, nodedata)
Example #5
 def setUp(self):
     skel = GraphSkeleton()
     skel.load("unittestdict.txt")
     skel.toporder()
     nodedata = NodeData()
     nodedata.load("unittestdict.txt")
     self.bn = DiscreteBayesianNetwork(skel, nodedata)
     self.fn = TableCPDFactorization(self.bn)
Example #6
    def setUp(self):
        nodedata = NodeData()
        nodedata.load("unittestlgdict.txt")
        skel = GraphSkeleton()
        skel.load("unittestdict.txt")
        skel.toporder()

        self.lgb = LGBayesianNetwork(skel, nodedata)
Example #7
 def setUp(self):
     skel = GraphSkeleton()
     skel.load("unittestdict.txt")
     skel.toporder()
     nodedata = NodeData()
     nodedata.load("unittestdict.txt")
     self.bn = DiscreteBayesianNetwork(skel, nodedata)
     self.fn = TableCPDFactorization(self.bn)
Example #8
    def setUp(self):
        nodedata = NodeData()
        nodedata.load("unittestlgdict.txt")
        skel = GraphSkeleton()
        skel.load("unittestdict.txt")
        skel.toporder()

        self.lgb = LGBayesianNetwork(skel, nodedata)
Example #9
 def setUp(self):
     self.nd = NodeData()
     self.nd.load("unittesthdict.txt")
     self.nd.entriestoinstances()
     self.skel = GraphSkeleton()
     self.skel.load("unittestdict.txt")
     self.skel.toporder()
     self.hybn = HyBayesianNetwork(self.skel, self.nd)
Example #10
def getTableCPD():
    nd = NodeData()
    skel = GraphSkeleton()
    jsonpath = ""
    nd.load(jsonpath)
    skel.load(jsonpath)
    bn = DiscreteBayesianNetwork(skel, nd)
    tablecpd = TableCPDFactorization(bn)
    return tablecpd
Example #11
 def setUp(self):
     skel = GraphSkeleton()
     skel.load("unittestdict.txt")
     skel.toporder()
     nodedata = NodeData()
     nodedata.load("unittestdict.txt")
     self.instance = DiscreteBayesianNetwork(skel, nodedata)
     self.factor = TableCPDFactor("Grade", self.instance)
     self.factor2 = TableCPDFactor("Letter", self.instance)
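Once both factors exist, they can be combined and marginalized with libpgm's TableCPDFactor methods (multiplyfactor and sumout operate on the calling factor in place). The test method below is a hypothetical sketch, not part of the original example.

 def test_factor_product_sketch(self):
     # multiply the "Grade" factor by the "Letter" factor, then sum "Letter" back out
     self.factor.multiplyfactor(self.factor2)
     self.factor.sumout("Letter")
     print(self.factor.scope)  # variables remaining in the product
     print(self.factor.vals)   # flattened probability table over that scope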
Example #12
class TestNodeData(unittest.TestCase):
    def setUp(self):
        self.nd = NodeData()

    def test_entriestoinstances(self):
        self.nd.load("unittesthdict.txt")
        self.nd.entriestoinstances()
        result = self.nd.nodes["Intelligence"].choose([])
        self.assertTrue(result == 'low' or result == 'high')
Example #13
 def setUp(self):
     skel = GraphSkeleton()
     skel.load("unittestdict.txt")
     skel.toporder()
     nodedata = NodeData()
     nodedata.load("unittestdict.txt")
     self.instance = DiscreteBayesianNetwork(skel, nodedata)
     self.factor = TableCPDFactor("Grade", self.instance)
     self.factor2 = TableCPDFactor("Letter", self.instance)
Example #14
def q_without_ros():
    skel = GraphSkeleton()
    skel.V = ["prize_door", "guest_door", "monty_door"]
    skel.E = [["prize_door", "monty_door"],
              ["guest_door", "monty_door"]]
    skel.toporder()
    nd = NodeData()
    nd.Vdata = {
        "prize_door": {
            "numoutcomes": 3,
            "parents": None,
            "children": ["monty_door"],
            "vals": ["A", "B", "C"],
            "cprob": [1.0/3, 1.0/3, 1.0/3],
        },
        "guest_door": {
            "numoutcomes": 3,
            "parents": None,
            "children": ["monty_door"],
            "vals": ["A", "B", "C"],
            "cprob": [1.0/3, 1.0/3, 1.0/3],
        },
        "monty_door": {
            "numoutcomes": 3,
            "parents": ["prize_door", "guest_door"],
            "children": None,
            "vals": ["A", "B", "C"],
            "cprob": {
                "['A', 'A']": [0., 0.5, 0.5],
                "['B', 'B']": [0.5, 0., 0.5],
                "['C', 'C']": [0.5, 0.5, 0.],
                "['A', 'B']": [0., 0., 1.],
                "['A', 'C']": [0., 1., 0.],
                "['B', 'A']": [0., 0., 1.],
                "['B', 'C']": [1., 0., 0.],
                "['C', 'A']": [0., 1., 0.],
                "['C', 'B']": [1., 0., 0.],
            },
        },
    }
    bn = DiscreteBayesianNetwork(skel, nd)
    fn = TableCPDFactorization(bn)

    query = {
        "prize_door": ["A","B","C"],
    }
    evidence = {
        "guest_door": "A",
        "monty_door": "B",
    }

    res = fn.condprobve(query, evidence)
    print res.vals
    print res.scope
    print res.card
    print res.stride
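The query above asks for the distribution of prize_door given that the guest picked door A and Monty opened door B, so condprobve should reproduce the classic Monty Hall result: switching wins with probability 2/3. Below is a minimal driver with the expected output noted as comments; the ordering of res.vals is assumed to follow the node's vals list ["A", "B", "C"].

if __name__ == "__main__":
    # expected (approximate) values printed by q_without_ros(), under the assumption above:
    #   res.vals  -> [0.3333, 0.0, 0.6667]   (stay with A: 1/3, switch to C: 2/3)
    #   res.scope -> ["prize_door"]
    q_without_ros()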
Example #15
def getTableCPD():
    nd = NodeData()
    skel = GraphSkeleton()
    jsonpath = "./graph/graph_example.txt"
    nd.load(jsonpath)
    skel.load(jsonpath)
    # load Bayesian network
    bn = DiscreteBayesianNetwork(skel, nd)
    tablecpd = TableCPDFactorization(bn)
    return tablecpd
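A typical follow-up is a specific query against the returned factorization; the sketch below uses hypothetical node and value names, since they depend on what ./graph/graph_example.txt actually defines.

tablecpd = getTableCPD()
# "NodeA"/"NodeB" and their values are placeholders; substitute nodes from graph_example.txt
print(tablecpd.specificquery(dict(NodeA='true'), dict(NodeB='false')))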
Example #16
class TestNodeData(unittest.TestCase):

    def setUp(self):
        self.nd = NodeData()

    def test_entriestoinstances(self):
        self.nd.load("unittesthdict.txt")
        self.nd.entriestoinstances()
        result = self.nd.nodes["Intelligence"].choose([])
        self.assertTrue(result == 'low' or result == 'high')
Example #17
def q_without_ros():
    skel = GraphSkeleton()
    skel.V = ["prize_door", "guest_door", "monty_door"]
    skel.E = [["prize_door", "monty_door"], ["guest_door", "monty_door"]]
    skel.toporder()
    nd = NodeData()
    nd.Vdata = {
        "prize_door": {
            "numoutcomes": 3,
            "parents": None,
            "children": ["monty_door"],
            "vals": ["A", "B", "C"],
            "cprob": [1.0 / 3, 1.0 / 3, 1.0 / 3],
        },
        "guest_door": {
            "numoutcomes": 3,
            "parents": None,
            "children": ["monty_door"],
            "vals": ["A", "B", "C"],
            "cprob": [1.0 / 3, 1.0 / 3, 1.0 / 3],
        },
        "monty_door": {
            "numoutcomes": 3,
            "parents": ["prize_door", "guest_door"],
            "children": None,
            "vals": ["A", "B", "C"],
            "cprob": {
                "['A', 'A']": [0., 0.5, 0.5],
                "['B', 'B']": [0.5, 0., 0.5],
                "['C', 'C']": [0.5, 0.5, 0.],
                "['A', 'B']": [0., 0., 1.],
                "['A', 'C']": [0., 1., 0.],
                "['B', 'A']": [0., 0., 1.],
                "['B', 'C']": [1., 0., 0.],
                "['C', 'A']": [0., 1., 0.],
                "['C', 'B']": [1., 0., 0.],
            },
        },
    }
    bn = DiscreteBayesianNetwork(skel, nd)
    fn = TableCPDFactorization(bn)

    query = {
        "prize_door": ["A", "B", "C"],
    }
    evidence = {
        "guest_door": "A",
        "monty_door": "B",
    }

    res = fn.condprobve(query, evidence)
    print res.vals
    print res.scope
    print res.card
    print res.stride
Example #18
    def load(self, file_name):
        #### Load BN
        nd = NodeData()
        skel = GraphSkeleton()
        nd.load(file_name)  # any input file
        skel.load(file_name)

        # topologically order graphskeleton
        skel.toporder()

        super(DiscreteBayesianNetworkExt, self).__init__(skel, nd)
Example #19
def getTableCPD():
   nd = NodeData()
   skel = GraphSkeleton()
   jsonpath = "job_interview.txt"
   nd.load(jsonpath)
   skel.load(jsonpath)

   #load bayesian network
   bn = DiscreteBayesianNetwork(skel, nd)
   tablecpd = TableCPDFactorization(bn)
   return tablecpd
Example #20
 def test_query(self):
     teacher_nd = NodeData()
     teacher_nd.load(self.teacher_data_path)
     req = DiscreteQueryRequest()
     req.nodes = U.discrete_nodes_to_ros(teacher_nd.Vdata)
     req.evidence = [DiscreteNodeState("Letter", "weak")]
     req.query = ["Grade"]
     res = self.query(req)
     self.assertEqual(len(res.nodes), 1)
     n = res.nodes[0]
     self.assertEqual(n.name, "Grade")
     self.assertListEqual(['A', 'B', 'C'], n.outcomes)
Example #21
    def load(self, file_name):
        #### Load BN
        nd = NodeData()
        skel = GraphSkeleton()
        nd.load(file_name)  # any input file
        skel.load(file_name)

        # topologically order graphskeleton
        skel.toporder()

        super(DiscreteBayesianNetworkExt, self).__init__(skel, nd)
        ##TODO load evidence
Example #22
 def test_query(self):
     teacher_nd = NodeData()
     teacher_nd.load(self.teacher_data_path)
     req = DiscreteQueryRequest()
     req.nodes = U.discrete_nodes_to_ros(teacher_nd.Vdata)
     req.evidence = [DiscreteNodeState("Letter", "weak")]
     req.query = ["Grade"]
     res = self.query(req)
     self.assertEqual(len(res.nodes), 1)
     n = res.nodes[0]
     self.assertEqual(n.name, "Grade")
     self.assertListEqual(['A','B','C'], n.outcomes)
Example #23
def loadbn(param_file):
    """
    This function loads the bn model into the workspace from its associated .txt file.
    """
    file_path = os.path.join(experiment_dir, 'parameters', param_file + '.txt')

    nd = NodeData()
    skel = GraphSkeleton()
    nd.load(file_path)
    skel.load(file_path)
    skel.toporder()
    bn = DiscreteBayesianNetwork(skel, nd)
    return bn
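A usage sketch for loadbn (the parameter-file name is hypothetical; the function only assumes that <experiment_dir>/parameters/<param_file>.txt exists):

bn = loadbn("my_model")         # hypothetical file: <experiment_dir>/parameters/my_model.txt
samples = bn.randomsample(100)  # draw 100 joint samples from the loaded network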
Example #24
class TestDynDiscBayesianNetwork(unittest.TestCase):
    def setUp(self):
        self.nd = NodeData()
        self.nd.load("unittestdyndict.txt")
        self.skel = GraphSkeleton()
        self.skel.load("unittestdyndict.txt")
        self.skel.toporder()
        self.d = DynDiscBayesianNetwork(self.skel, self.nd)

    def test_randomsample(self):
        sample = self.d.randomsample(10)
        for i in range(1, 10):
            self.assertEqual(sample[0]['Difficulty'], sample[i]['Difficulty'])
Example #25
class TestDynDiscBayesianNetwork(unittest.TestCase):

    def setUp(self):
        self.nd = NodeData()
        self.nd.load("unittestdyndict.txt")
        self.skel = GraphSkeleton()
        self.skel.load("unittestdyndict.txt")
        self.skel.toporder()
        self.d = DynDiscBayesianNetwork(self.skel, self.nd)

    def test_randomsample(self):
        sample = self.d.randomsample(10)
        for i in range(1, 10):
            self.assertEqual(sample[0]['Difficulty'], sample[i]['Difficulty'])
Example #26
class TestHyBayesianNetwork(unittest.TestCase):
    def setUp(self):
        self.nd = NodeData()
        self.nd.load("unittesthdict.txt")
        self.nd.entriestoinstances()
        self.skel = GraphSkeleton()
        self.skel.load("unittestdict.txt")
        self.skel.toporder()
        self.hybn = HyBayesianNetwork(self.skel, self.nd)

    def test_randomsample(self):
        sample = self.hybn.randomsample(1)[0]
        self.assertTrue(isinstance(sample['Grade'], float))
        self.assertTrue(isinstance(sample['Intelligence'], str))
        self.assertEqual(sample["SAT"][-12:], 'blueberries!')
Example #27
 def setUp(self):
     skel = GraphSkeleton()
     skel.load("unittestdict.txt")
     skel.toporder()
     nodedata = NodeData()
     nodedata.load("unittestdict.txt")
     self.bn = DiscreteBayesianNetwork(skel, nodedata)
     agg = SampleAggregator()
     agg.aggregate(self.bn.randomsample(50))
     self.rseq = agg.seq
     self.ravg = agg.avg
     self.fn = TableCPDFactorization(self.bn)
     evidence = dict(Letter='weak')
     agg.aggregate(self.fn.gibbssample(evidence, 51))
     self.gseq = agg.seq
     self.gavg = agg.avg
Example #28
 def setUp(self):
     self.nd = NodeData()
     self.nd.load("unittestdyndict.txt")
     self.skel = GraphSkeleton()
     self.skel.load("unittestdyndict.txt")
     self.skel.toporder()
     self.d = DynDiscBayesianNetwork(self.skel, self.nd)
Example #29
class TestHyBayesianNetwork(unittest.TestCase):

    def setUp(self):
        self.nd = NodeData()
        self.nd.load("unittesthdict.txt")
        self.nd.entriestoinstances()
        self.skel = GraphSkeleton()
        self.skel.load("unittestdict.txt")
        self.skel.toporder()
        self.hybn = HyBayesianNetwork(self.skel, self.nd)

    def test_randomsample(self):
        sample = self.hybn.randomsample(1)[0]
        self.assertTrue(isinstance(sample['Grade'], float))
        self.assertTrue(isinstance(sample['Intelligence'], str))
        self.assertEqual(sample["SAT"][-12:], 'blueberries!')
Example #30
def createData():
   nd = NodeData()
   skel = GraphSkeleton()
   fpath = "job_interview.txt"
   nd.load(fpath)
   skel.load(fpath)
   skel.toporder()
   bn = DiscreteBayesianNetwork(skel, nd)

   learner = PGMLearner()
   data = bn.randomsample(1000)
   X, Y = 'Grades', 'Offer'
   c, p, w = learner.discrete_condind(data, X, Y, ['Interview'])
   print "independence between X and Y: ", c, " p-value ", p, " witness node: ", w
   result = learner.discrete_constraint_estimatestruct(data)
   print result.E
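discrete_condind returns a chi-square statistic (c), a p-value (p), and the witness set (w). The lines below are a sketch of turning the p-value into an independence decision inside createData; the 5% significance level is an assumption, not part of the original example.

   # sketch: could be appended inside createData() above
   alpha = 0.05
   if p < alpha:
       print("Grades and Offer look dependent given Interview (p=%.4f)" % p)
   else:
       print("no evidence against conditional independence (p=%.4f)" % p)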
Example #31
 def setUp(self):
     skel = GraphSkeleton()
     skel.load("unittestdict.txt")
     skel.toporder()
     nodedata = NodeData()
     nodedata.load("unittestdict.txt")
     self.bn = DiscreteBayesianNetwork(skel, nodedata)
     agg = SampleAggregator()
     agg.aggregate(self.bn.randomsample(50))
     self.rseq = agg.seq
     self.ravg = agg.avg
     self.fn = TableCPDFactorization(self.bn)
     evidence = dict(Letter='weak')
     agg.aggregate(self.fn.gibbssample(evidence, 51))
     self.gseq = agg.seq
     self.gavg = agg.avg
Example #32
 def setUp(self):
     self.nd = NodeData()
     self.nd.load("unittesthdict.txt")
     self.nd.entriestoinstances()
     self.skel = GraphSkeleton()
     self.skel.load("unittestdict.txt")
     self.skel.toporder()
     self.hybn = HyBayesianNetwork(self.skel, self.nd)
Example #33
 def construct(self):
     skel = GraphSkeleton()
     skel.V = self.nodes.keys()
     skel.E = []
     for node, ndata in self.nodes.iteritems():
         if ndata['parents']:
             for p in ndata['parents']:
                 skel.E.append([p, node])
                 self.nodes[p]['children'].append(node)
     for node, ndata in self.nodes.iteritems():
         if len(ndata['children']) == 0:
             ndata['children'] = None
     data = NodeData()
     data.Vdata = self.nodes
     skel.toporder()
     bn = DiscreteBayesianNetwork(skel, data)
     return bn
Example #34
def net2():
    nd = NodeData()
    skel = GraphSkeleton()
    nd.load("net.txt")  # an input file
    skel.load("net.txt")

    # topologically order graphskeleton
    skel.toporder()

    # load bayesian network
    lgbn = LGBayesianNetwork(skel, nd)

    in_data = read_data.getdata2()
    learner = PGMLearner()
    bn = learner.lg_mle_estimateparams(skel, in_data)

    p = cal_prob(in_data[300:500], bn)
    print p
    return 0
Example #35
def net2():
    nd = NodeData()
    skel = GraphSkeleton()
    nd.load("net.txt")  # an input file
    skel.load("net.txt")

    # topologically order graphskeleton
    skel.toporder()

    # load bayesian network
    lgbn = LGBayesianNetwork(skel, nd)

    in_data = read_data.getdata2()
    learner = PGMLearner()
    bn = learner.lg_mle_estimateparams(skel, in_data)

    p = cal_prob(in_data[300:500], bn)
    print p
    return 0
Example #36
    def setUp(self):
        # instantiate learner
        self.l = PGMLearner()

        # generate graph skeleton
        skel = GraphSkeleton()
        skel.load("unittestdict.txt")
        skel.toporder()

        # generate sample sequence to try to learn from - discrete
        nd = NodeData()
        nd.load("unittestdict.txt")
        self.samplediscbn = DiscreteBayesianNetwork(skel, nd)
        self.samplediscseq = self.samplediscbn.randomsample(5000)

        # generate sample sequence to try to learn from - linear Gaussian
        nda = NodeData()
        nda.load("unittestlgdict.txt")
        self.samplelgbn = LGBayesianNetwork(skel, nda)
        self.samplelgseq = self.samplelgbn.randomsample(10000)

        self.skel = skel
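With the learner, skeleton, and sampled sequences in place, the estimation calls this fixture is built for would look roughly like the hypothetical tests below (discrete_mle_estimateparams and lg_mle_estimateparams are existing PGMLearner methods; the test names and the printed node are illustrative only).

    def test_discrete_mle_sketch(self):
        # re-estimate the discrete CPDs from the sampled sequence
        result = self.l.discrete_mle_estimateparams(self.skel, self.samplediscseq)
        print(result.Vdata["Grade"])

    def test_lg_mle_sketch(self):
        # re-estimate the linear-Gaussian parameters from the sampled sequence
        result = self.l.lg_mle_estimateparams(self.skel, self.samplelgseq)
        print(result.Vdata)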
Example #37
    def setUp(self):
        # instantiate learner
        self.l = PGMLearner()

        # generate graph skeleton
        skel = GraphSkeleton()
        skel.load("unittestdict.txt")
        skel.toporder()

        # generate sample sequence to try to learn from - discrete
        nd = NodeData()
        nd.load("unittestdict.txt")
        self.samplediscbn = DiscreteBayesianNetwork(skel, nd)
        self.samplediscseq = self.samplediscbn.randomsample(5000)

        # generate sample sequence to try to learn from - linear Gaussian
        nda = NodeData()
        nda.load("unittestlgdict.txt")
        self.samplelgbn = LGBayesianNetwork(skel, nda)
        self.samplelgseq = self.samplelgbn.randomsample(10000)

        self.skel = skel
Example #38
    def test_structure_estimation(self):
        req = DiscreteStructureEstimationRequest()

        skel = GraphSkeleton()
        skel.load(self.data_path)
        skel.toporder()
        teacher_nd = NodeData()
        teacher_nd.load(self.teacher_data_path)
        bn = DiscreteBayesianNetwork(skel, teacher_nd)
        data = bn.randomsample(8000)
        for v in data:
            gs = DiscreteGraphState()
            for k_s, v_s in v.items():
                gs.node_states.append(DiscreteNodeState(node=k_s, state=v_s))
            req.states.append(gs)

        res = self.struct_estimate(req)
        self.assertIsNotNone(res.graph)
        self.assertEqual(len(res.graph.nodes), 5)
        self.assertGreater(len(res.graph.edges), 0)
Example #39
    def test_structure_estimation(self):
        req = DiscreteStructureEstimationRequest()

        skel = GraphSkeleton()
        skel.load(self.data_path)
        skel.toporder()
        teacher_nd = NodeData()
        teacher_nd.load(self.teacher_data_path)
        bn = DiscreteBayesianNetwork(skel, teacher_nd)
        data = bn.randomsample(8000)
        for v in data:
            gs = DiscreteGraphState()
            for k_s, v_s in v.items():
                gs.node_states.append(DiscreteNodeState(node=k_s, state=v_s))
            req.states.append(gs)

        res = self.struct_estimate(req)
        self.assertIsNotNone(res.graph)
        self.assertEqual(len(res.graph.nodes), 5)
        self.assertGreater(len(res.graph.edges), 0)
Example #40
def main():

    in_data = read_data.getdata()
    f_data = format_data(in_data)
    nd = NodeData()
    nd.load("net4.txt")  # an input file
    skel = GraphSkeleton()
    skel.load("net4.txt")
    skel.toporder()
    bn = DiscreteBayesianNetwork(skel, nd)

    #training dataset:70%
    bn2 = em(f_data[1:6000], bn, skel)

    pr_training = precision(f_data[1:6000], bn2)

    print "Prediction accuracy for training data:", pr_training[1]

    #testing dataset:30%
    pr = precision(f_data[6700:6800], bn2)
    print "Prediction accuracy for test data:", pr[1]
Example #41
def main():

    in_data = read_data.getdata()
    f_data = format_data(in_data)
    nd = NodeData()
    nd.load("net4.txt")  # an input file
    skel = GraphSkeleton()
    skel.load("net4.txt")
    skel.toporder()
    bn = DiscreteBayesianNetwork(skel, nd)

    # training dataset: 70%
    bn2 = em(f_data[1:6000], bn, skel)

    pr_training = precision(f_data[1:6000], bn2)

    print "Prediction accuracy for training data:", pr_training[1]

    # testing dataset: 30%
    pr = precision(f_data[6700:6800], bn2)
    print "Prediction accuracy for test data:", pr[1]
Example #42
    def test_param_estimation(self):
        req = DiscreteParameterEstimationRequest()

        # load graph structure
        skel = GraphSkeleton()
        skel.load(self.data_path)
        req.graph.nodes = skel.V
        req.graph.edges = [GraphEdge(k, v) for k,v in skel.E]
        skel.toporder()

        # generate trial data
        teacher_nd = NodeData()
        teacher_nd.load(self.teacher_data_path)
        bn = DiscreteBayesianNetwork(skel, teacher_nd)
        data = bn.randomsample(200)
        for v in data:
            gs = DiscreteGraphState()
            for k_s, v_s in v.items():
                gs.node_states.append(DiscreteNodeState(node=k_s, state=v_s))
            req.states.append(gs)

        self.assertEqual(len(self.param_estimate(req).nodes), 5)
Example #43
    def test_param_estimation(self):
        req = DiscreteParameterEstimationRequest()

        # load graph structure
        skel = GraphSkeleton()
        skel.load(self.data_path)
        req.graph.nodes = skel.V
        req.graph.edges = [GraphEdge(k, v) for k, v in skel.E]
        skel.toporder()

        # generate trial data
        teacher_nd = NodeData()
        teacher_nd.load(self.teacher_data_path)
        bn = DiscreteBayesianNetwork(skel, teacher_nd)
        data = bn.randomsample(200)
        for v in data:
            gs = DiscreteGraphState()
            for k_s, v_s in v.items():
                gs.node_states.append(DiscreteNodeState(node=k_s, state=v_s))
            req.states.append(gs)

        self.assertEqual(len(self.param_estimate(req).nodes), 5)
Example #44
File: pgm.py, Project: ml4ai/b3
 def setup(self):
     self.nd = NodeData()
     self.skel = GraphSkeleton()
     self.skel.V, self.skel.E = [], []
     self.nd.Vdata = {}
     for i, node in enumerate(self.node.values()):
         dNode = {}
         node.sId = str(i)
         dNode["numoutcomes"] = len(node.values)
         dNode["vals"] = node.values
         dNode["cprob"] = node.cpt
         #             dNode["parents"] = map(lambda x: if x=x.name, node.parents);
         self.skel.V.append(node.name)
         aParents = []
         for parent in node.parents:
             if parent == None: continue
             aParents.append(parent.name)
             self.skel.E.append([parent.name, node.name])
         dNode["parents"] = aParents if len(aParents) > 0 else None
         self.nd.Vdata[node.name] = dNode
     self.skel.toporder()
     self.bn = DiscreteBayesianNetwork(self.skel, self.nd)
     self.fn = TableCPDFactorization(self.bn)
Example #45
if __name__ == '__main__':
    rospy.init_node("pgm_learner_sample_discrete")

    param_estimate = rospy.ServiceProxy("pgm_learner/discrete/parameter_estimation", DiscreteParameterEstimation)

    req = DiscreteParameterEstimationRequest()

    dpath = os.path.join(PKG_PATH, "test", "graph-test.txt")
    tpath = dpath

    # load graph structure
    skel = GraphSkeleton()
    skel.load(dpath)
    req.graph.nodes = skel.V
    req.graph.edges = [GraphEdge(k, v) for k,v in skel.E]
    skel.toporder()

    # generate trial data
    teacher_nd = NodeData()
    teacher_nd.load(dpath)
    bn = DiscreteBayesianNetwork(skel, teacher_nd)
    data = bn.randomsample(200)
    for v in data:
        gs = DiscreteGraphState()
        for k_s, v_s in v.items():
            gs.node_states.append(DiscreteNodeState(node=k_s, state=v_s))
        req.states.append(gs)

    PP.pprint(param_estimate(req).nodes)
Example #46
 def setUp(self):
     self.nd = NodeData()
Example #47
import json

from libpgm.nodedata import NodeData
from libpgm.graphskeleton import GraphSkeleton
from libpgm.lgbayesiannetwork import LGBayesianNetwork
from libpgm.pgmlearner import PGMLearner

# generate some data to use
nd = NodeData()
nd.load("gaussGrades.txt")  # an input file
skel = GraphSkeleton()
skel.load("gaussGrades.txt")
skel.toporder()
lgbn = LGBayesianNetwork(skel, nd)
data = lgbn.randomsample(8000)

print data

# instantiate my learner
learner = PGMLearner()

# estimate structure
result = learner.lg_constraint_estimatestruct(data)

# output
print json.dumps(result.E, indent=2)
Example #48
    param_estimate = rospy.ServiceProxy(
        "pgm_learner/linear_gaussian/parameter_estimation", LinearGaussianParameterEstimation
    )

    req = LinearGaussianParameterEstimationRequest()

    dpath = os.path.join(PKG_PATH, "test", "graph-test.txt")
    tpath = os.path.join(PKG_PATH, "test", "graph-lg-test.txt")

    # load graph structure
    skel = GraphSkeleton()
    skel.load(dpath)
    req.graph.nodes = skel.V
    req.graph.edges = [GraphEdge(k, v) for k, v in skel.E]
    skel.toporder()

    # generate trial data
    teacher_nd = NodeData()
    teacher_nd.load(tpath)
    bn = LGBayesianNetwork(skel, teacher_nd)
    data = bn.randomsample(200)

    for v in data:
        gs = LinearGaussianGraphState()
        for k_s, v_s in v.items():
            gs.node_states.append(LinearGaussianNodeState(node=k_s, state=v_s))
        req.states.append(gs)

    PP.pprint(param_estimate(req).nodes)
Example #49
import json

from libpgm.nodedata import NodeData
from libpgm.graphskeleton import GraphSkeleton
from libpgm.discretebayesiannetwork import DiscreteBayesianNetwork
from libpgm.tablecpdfactorization import TableCPDFactorization

# load nodedata and graphskeleton
nd = NodeData()
skel = GraphSkeleton()
nd.load("tests/net1.json")    # any input file
skel.load("tests/net1.json")

# topologically order graphskeleton
skel.toporder()

# load bayesian network
bn = DiscreteBayesianNetwork(skel, nd)

fn = TableCPDFactorization(bn)


# sample 
result = fn.specificquery(dict(C='T'), dict(B='F'))

# output
print json.dumps(result, indent=2)
Example #50
# print skel
#
# # instantiate my learner
# learner = PGMLearner()
#
# # estimate parameters
# result = learner.discrete_mle_estimateparams(skel, data)
#
# # output - toggle comment to see
# print json.dumps(result.Vdata, indent=2)

# (5) --------------------------------------------------------------------------
# Compute the probability distribution over a specific node or nodes

# load nodedata and graphskeleton
nd = NodeData()
skel = GraphSkeleton()
nd.load("../tests/unittestdict.txt")
skel.load("../tests/unittestdict.txt")

# toporder graph skeleton
print skel.toporder()

# load evidence
evidence = {"Intelligence": "high"}
query = {"Grade": "A"}

# load bayesian network
bn = DiscreteBayesianNetwork(skel, nd)

# load factorization
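The snippet above breaks off right after the "# load factorization" comment; under the API used throughout these examples, a plausible continuation is:

# plausible continuation (not part of the original snippet)
fn = TableCPDFactorization(bn)
result = fn.condprobve(query, evidence)
print(result.vals)  # distribution over "Grade" given Intelligence = "high"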
Example #51
def discrete_nodedata_from_ros(nodes):
    nd = NodeData()
    nd.Vdata = {n.name: dict_from_ros_discrete_node(n) for n in nodes}
    return nd

Example #52
data_l = []
for line in data_r.readlines():
	data_l.append(map(int, line.split()))

truth_l = []
for row in truth_r:
	truth_l.append(row[0])

w = csv.writer(open("bayesian_outcome.txt", "wb"))

count = 0

for i in range(104):
	nd = NodeData()
	skel = GraphSkeleton()
	nd.load('bayes_net/'+str(i)+".txt")    # any input file
	skel.load('bayes_net/'+str(i)+".txt")

	# topologically order graphskeleton
	skel.toporder()

	# load bayesian network
	bn = DiscreteBayesianNetwork(skel, nd)
	dic1 = {}
	k = 1
	for c in data_l[i]:
		dic1[str(k)] = str(c)
		k += 2
Example #53
def Threshold(list):
    temp = []
    #temp.append(min(list)+float(max(list) - min(list))*1/3)
    #temp.append(min(list)+float(max(list) - min(list))*2/3)
    temp.append(float(max(list))/3)
    temp.append(float(max(list))/3*2)
    return temp
    
EachLikeThreshold = Threshold(EachLike) 
EachLikedThreshold = Threshold(EachLiked)
print EachLikeThreshold
print EachLikedThreshold

BulliedPro = []
nd = NodeData()
skel = GraphSkeleton()
nd.load('unittestdict.txt')
skel.load('unittestdict.txt')
bn = DiscreteBayesianNetwork(skel, nd)
fn = TableCPDFactorization(bn)

for i in range(len(EachLike)):
    evidence = {}
    if EachLike[i] <= EachLikeThreshold[0]:
        evidence['LikeN'] = 'Small'
    elif EachLikeThreshold[0] < EachLike[i] and EachLike[i] <= EachLikeThreshold[1]:
        evidence['LikeN'] = 'Mid'
    else:
        evidence['LikeN'] = 'Big'
    if EachLiked[i] <= EachLikedThreshold[0]:
Example #54
    rospy.init_node("pgm_learner_sample_discrete")

    param_estimate = rospy.ServiceProxy(
        "pgm_learner/discrete/parameter_estimation",
        DiscreteParameterEstimation)

    req = DiscreteParameterEstimationRequest()

    dpath = os.path.join(PKG_PATH, "test", "graph-test.txt")
    tpath = dpath

    # load graph structure
    skel = GraphSkeleton()
    skel.load(dpath)
    req.graph.nodes = skel.V
    req.graph.edges = [GraphEdge(k, v) for k, v in skel.E]
    skel.toporder()

    # generate trial data
    teacher_nd = NodeData()
    teacher_nd.load(dpath)
    bn = DiscreteBayesianNetwork(skel, teacher_nd)
    data = bn.randomsample(200)
    for v in data:
        gs = DiscreteGraphState()
        for k_s, v_s in v.items():
            gs.node_states.append(DiscreteNodeState(node=k_s, state=v_s))
        req.states.append(gs)

    PP.pprint(param_estimate(req).nodes)
Example #55
    def __init__(self, nodes):

        self.nodes = {}

        self.children = defaultdict(list)
        self.parents = defaultdict(list)
        self.outputs = {}
        for name, node_spec in nodes.iteritems():
            node_type = node_spec["type"]
            if node_type == "inferred":
                parents = node_spec["parents"]
                # store the relationship between these elements
                for parent in parents:
                    normalised = normalise_name(parent)
                    self.parents[name].append(normalised)
                    self.children[normalised].append(name)
                truth_table = parse_truth_table(node_spec["p"], parents)
                node = make_node(truth_table, parents, node_type)
                self.nodes[name] = node

            if node_type == "fsm_input":
                node = make_node([1.0, 0.0], None, node_type)
                self.nodes[name] = node

            if node_type == "sensor_input":
                proxy_node = make_node([1.0, 0.0], None, "proxy")
                proxy_name = "_proxy_%s" % name
                self.nodes[proxy_name] = proxy_node
                self.children[proxy_name].append(name)
                node = make_node({
                    "['T']": [1.0, 0.0],
                    "['F']": [0.0, 1.0]
                }, [proxy_name], node_type)
                self.nodes[name] = node
            if node_type == "output":
                self.outputs[name] = node_spec

        for node in self.nodes:
            if len(self.children[node]) > 0:
                self.nodes[node]["children"] = self.children[node]
            else:
                self.nodes[node]["children"] = None

        # certainty scaling
        self.event_caution = 0.0

        og = OrderedSkeleton()
        og.V = self.nodes.keys()
        edges = []
        for k, children in self.children.iteritems():
            for child in children:
                edges.append((k, child))

        og.E = edges
        og.toporder()

        nd = NodeData()
        nd.Vdata = self.nodes

        #logging.debug(pprint.pformat(nd.Vdata))

        self.net = DiscreteBayesianNetwork(og, nd)
        self.factor_net = TableCPDFactorization(self.net)
Example #56
import json

from libpgm.nodedata import NodeData
from libpgm.graphskeleton import GraphSkeleton
from libpgm.discretebayesiannetwork import DiscreteBayesianNetwork
from libpgm.pgmlearner import PGMLearner

nd = NodeData()
nd.load("nodedata.json")
skel = GraphSkeleton()
skel.load("nodedata.json")
skel.toporder()

bn = DiscreteBayesianNetwork(skel,nd)
with open("manipulatedata.json") as fp:
    data = json.load(fp)

learner = PGMLearner()

# result = learner.discrete_constraint_estimatestruct(data)
result = learner.discrete_estimatebn(data)

print json.dumps(result.E, indent=2)
print json.dumps(result.Vdata, indent=2)
Example #57
import json

from libpgm.nodedata import NodeData
from libpgm.graphskeleton import GraphSkeleton
from libpgm.discretebayesiannetwork import DiscreteBayesianNetwork
from libpgm.pgmlearner import PGMLearner

# generate some data to use
nd = NodeData()
nd.load("grades.txt")    # an input file
skel = GraphSkeleton()
skel.load("grades.txt")
skel.toporder()
bn = DiscreteBayesianNetwork(skel, nd)
data = bn.randomsample(80000)

# instantiate my learner 
learner = PGMLearner()

# estimate structure
result = learner.discrete_constraint_estimatestruct(data)

# output
print json.dumps(result.E, indent=2)
Example #58
from libpgm.nodedata import NodeData
from libpgm.graphskeleton import GraphSkeleton
from libpgm.discretebayesiannetwork import DiscreteBayesianNetwork
from libpgm.lgbayesiannetwork import LGBayesianNetwork
from libpgm.hybayesiannetwork import HyBayesianNetwork
from libpgm.dyndiscbayesiannetwork import DynDiscBayesianNetwork
from libpgm.tablecpdfactorization import TableCPDFactorization
from libpgm.sampleaggregator import SampleAggregator
from libpgm.pgmlearner import PGMLearner

# (1) ---------------------------------------------------------------------
# Generate a sequence of samples from a discrete-CPD Bayesian network

# load nodedata and graphskeleton
nd = NodeData()
skel = GraphSkeleton()
nd.load("../tests/unittestdict.txt")
skel.load("../tests/unittestdict.txt")

# topologically order graphskeleton
skel.toporder()

# load bayesian network
bn = DiscreteBayesianNetwork(skel, nd)

# sample 
result = bn.randomsample(10)

# output - toggle comment to see
#print json.dumps(result, indent=2)
Example #59
locatValsList = ["Idle", "Bed", "Hall", "Both"]
activValsList = ["Away", "Sleeping", "Wandering", "Reading", "Diverse"]
dictionary = set().union(wkdayValsList, hourValsList, locatValsList,
                         activValsList)

# checking if input from user was appropriate
if set(userinput).issubset(dictionary):
    # initializing probabilities lists
    wkdayProbList = []
    hourProbList = []
    locatProbList = []
    activProbList = []

    #INITIALIZING BN 1
    # load nodedata and graphskeleton
    nd1 = NodeData()
    skel1 = GraphSkeleton()
    nd1.load(path_bn1)
    skel1.load(path_bn1)
    skel1.toporder()  # toporder graph skeleton

    #INITIALIZING BN 2
    # load nodedata and graphskeleton
    nd2 = NodeData()
    skel2 = GraphSkeleton()
    nd2.load(path_bn2)
    skel2.load(path_bn2)
    skel2.toporder()  # toporder graph skeleton

    # FINDING NEXT ACTIVITY ATTRIBUTES THROUGH INFERENCE ON BN 1
    # wkday variable query
Example #60
 def setUp(self):
     self.nd = NodeData()