class TestLGBayesianNetwork(unittest.TestCase):
    """Tests for sampling from a linear Gaussian Bayesian network."""

    def setUp(self):
        # Load the CPD parameters and the graph structure, then build the
        # network.  The original built (and topordered) the skeleton but
        # never passed it: LGBayesianNetwork is constructed from both the
        # skeleton and the node data everywhere else in this file.  It also
        # called NodeData.load on the class instead of an instance.
        nodedata = NodeData()
        nodedata.load("unittestlgdict.txt")
        skel = GraphSkeleton()
        skel.load("unittestdict.txt")
        skel.toporder()  # sampling requires parents before children
        self.lgb = LGBayesianNetwork(skel, nodedata)

    def test_randomsample(self):
        # A single draw should assign a float value to each of the 5 nodes.
        seq = self.lgb.randomsample(1)
        self.assertEqual(len(seq[0]), 5)
        for entry in seq[0]:
            # assertTrue(x, float) only checked truthiness (float was the
            # msg argument) and would flake on a sampled 0.0;
            # assertIsInstance performs the intended type check.
            self.assertIsInstance(seq[0][entry], float)
class TestLGBayesianNetwork(unittest.TestCase):
    """Tests for sampling from a linear Gaussian Bayesian network."""

    def setUp(self):
        # Load the CPD parameters and the graph structure, then build the
        # network.  The original built (and topordered) the skeleton but
        # never passed it: LGBayesianNetwork is constructed from both the
        # skeleton and the node data everywhere else in this file.  It also
        # called NodeData.load on the class instead of an instance.
        nodedata = NodeData()
        nodedata.load("unittestlgdict.txt")
        skel = GraphSkeleton()
        skel.load("unittestdict.txt")
        skel.toporder()  # sampling requires parents before children
        self.lgb = LGBayesianNetwork(skel, nodedata)

    def test_randomsample(self):
        # A single draw should assign a float value to each of the 5 nodes.
        seq = self.lgb.randomsample(1)
        self.assertEqual(len(seq[0]), 5)
        for entry in seq[0]:
            # assertTrue(x, float) only checked truthiness (float was the
            # msg argument) and would flake on a sampled 0.0;
            # assertIsInstance performs the intended type check.
            self.assertIsInstance(seq[0][entry], float)
def test_structure_estimation(self):
    """The estimator recovers the known 5-node / 4-edge structure."""
    request = LinearGaussianStructureEstimationRequest()

    # Build the teacher network that generates the trial data.
    skeleton = GraphSkeleton()
    skeleton.load(self.data_path)
    skeleton.toporder()
    teacher_data = NodeData()
    teacher_data.load(self.teacher_data_path)
    teacher_bn = LGBayesianNetwork(skeleton, teacher_data)

    # Convert every sampled assignment into a graph-state message.
    for sample in teacher_bn.randomsample(8000):
        graph_state = LinearGaussianGraphState()
        for name, value in sample.items():
            graph_state.node_states.append(
                LinearGaussianNodeState(node=name, state=value))
        request.states.append(graph_state)

    response = self.struct_estimate(request)
    self.assertIsNotNone(response.graph)
    self.assertEqual(len(response.graph.nodes), 5)
    self.assertEqual(len(response.graph.edges), 4)
def test_param_estimation(self):
    """Parameter estimation returns estimates for all 5 nodes."""
    request = LinearGaussianParameterEstimationRequest()

    # Describe the known graph structure in the request.
    skeleton = GraphSkeleton()
    skeleton.load(self.data_path)
    request.graph.nodes = skeleton.V
    request.graph.edges = [GraphEdge(parent, child)
                           for parent, child in skeleton.E]
    skeleton.toporder()

    # Sample trial data from the teacher network.
    teacher_data = NodeData()
    teacher_data.load(self.teacher_data_path)
    teacher_bn = LGBayesianNetwork(skeleton, teacher_data)
    for sample in teacher_bn.randomsample(200):
        graph_state = LinearGaussianGraphState()
        for name, value in sample.items():
            graph_state.node_states.append(
                LinearGaussianNodeState(node=name, state=value))
        request.states.append(graph_state)

    self.assertEqual(len(self.param_estimate(request).nodes), 5)
import json

from libpgm.nodedata import NodeData
from libpgm.graphskeleton import GraphSkeleton
from libpgm.lgbayesiannetwork import LGBayesianNetwork
from libpgm.pgmlearner import PGMLearner

# Generate some data to use: sample from a known linear Gaussian network.
# The same input file carries both the CPD parameters and the structure.
nd = NodeData()
nd.load("gaussGrades.txt")    # an input file
skel = GraphSkeleton()
skel.load("gaussGrades.txt")
skel.toporder()               # sampling requires parents before children
lgbn = LGBayesianNetwork(skel, nd)
data = lgbn.randomsample(8000)
# print() form is valid on both Python 2 and 3; the original bare
# `print data` statement is a SyntaxError on Python 3.
print(data)

# instantiate my learner
learner = PGMLearner()

# estimate structure from the samples alone
result = learner.lg_constraint_estimatestruct(data)

# output the recovered edge list
print(json.dumps(result.E, indent=2))
class TestPGMLearner(unittest.TestCase):
    """End-to-end tests for PGMLearner parameter and structure learning."""

    def setUp(self):
        # instantiate learner
        self.l = PGMLearner()

        # generate graph skeleton
        skel = GraphSkeleton()
        skel.load("unittestdict.txt")
        skel.toporder()

        # generate sample sequence to try to learn from - discrete
        nd = NodeData()
        nd.load("unittestdict.txt")
        self.samplediscbn = DiscreteBayesianNetwork(skel, nd)
        self.samplediscseq = self.samplediscbn.randomsample(5000)

        # generate sample sequence to try to learn from - linear Gaussian
        # (the original comment said "discrete" twice — copy/paste slip)
        nda = NodeData()
        nda.load("unittestlgdict.txt")
        self.samplelgbn = LGBayesianNetwork(skel, nda)
        self.samplelgseq = self.samplelgbn.randomsample(10000)

        self.skel = skel

    def test_discrete_mle_estimateparams(self):
        result = self.l.discrete_mle_estimateparams(self.skel, self.samplediscseq)
        # Learned conditional probabilities should be near the teacher's.
        indexa = result.Vdata['SAT']['vals'].index('lowscore')
        self.assertTrue(.9 < result.Vdata['SAT']['cprob']["['low']"][indexa] < 1)
        indexb = result.Vdata['Letter']['vals'].index('weak')
        self.assertTrue(.05 < result.Vdata['Letter']['cprob']["['A']"][indexb] < .15)

    def test_lg_mle_estimateparams(self):
        result = self.l.lg_mle_estimateparams(self.skel, self.samplelgseq)
        self.assertTrue(5 < result.Vdata['SAT']['mean_base'] < 15)
        self.assertTrue(5 < result.Vdata['Letter']['variance'] < 15)

    def test_discrete_constraint_estimatestruct(self):
        result = self.l.discrete_constraint_estimatestruct(self.samplediscseq)
        self.assertTrue(["Difficulty", "Grade"] in result.E)

    def test_lg_constraint_estimatestruct(self):
        result = self.l.lg_constraint_estimatestruct(self.samplelgseq)
        self.assertTrue(["Intelligence", "Grade"] in result.E)

    def test_discrete_condind(self):
        # Conditioned on Grade, Difficulty and Letter should test independent.
        chi, pv, witness = self.l.discrete_condind(
            self.samplediscseq, "Difficulty", "Letter", ["Grade"])
        self.assertTrue(pv > .05)
        # assertTrue(witness, ["Grade"]) only checked truthiness (the list
        # was the msg argument); assertEqual performs the intended check.
        self.assertEqual(witness, ["Grade"])
        chia, pva, witnessa = self.l.discrete_condind(
            self.samplediscseq, "Difficulty", "Intelligence", [])
        self.assertTrue(pva < .05)

    def test_discrete_estimatebn(self):
        result = self.l.discrete_estimatebn(self.samplediscseq)
        self.assertTrue(result.V)
        self.assertTrue(result.E)
        self.assertTrue(result.Vdata["Difficulty"]["cprob"][0])

    def test_lg_estimatebn(self):
        result = self.l.lg_estimatebn(self.samplelgseq)
        self.assertTrue(result.V)
        self.assertTrue(result.E)
        self.assertTrue(result.Vdata["Intelligence"]["mean_base"])
class TestPGMLearner(unittest.TestCase):
    """End-to-end tests for PGMLearner parameter and structure learning."""

    def setUp(self):
        # instantiate learner
        self.l = PGMLearner()

        # generate graph skeleton
        skel = GraphSkeleton()
        skel.load("unittestdict.txt")
        skel.toporder()

        # generate sample sequence to try to learn from - discrete
        nd = NodeData()
        nd.load("unittestdict.txt")
        self.samplediscbn = DiscreteBayesianNetwork(skel, nd)
        self.samplediscseq = self.samplediscbn.randomsample(5000)

        # generate sample sequence to try to learn from - linear Gaussian
        # (the original comment said "discrete" twice — copy/paste slip)
        nda = NodeData()
        nda.load("unittestlgdict.txt")
        self.samplelgbn = LGBayesianNetwork(skel, nda)
        self.samplelgseq = self.samplelgbn.randomsample(10000)

        self.skel = skel

    def test_discrete_mle_estimateparams(self):
        result = self.l.discrete_mle_estimateparams(self.skel, self.samplediscseq)
        # Learned conditional probabilities should be near the teacher's.
        indexa = result.Vdata['SAT']['vals'].index('lowscore')
        self.assertTrue(.9 < result.Vdata['SAT']['cprob']["['low']"][indexa] < 1)
        indexb = result.Vdata['Letter']['vals'].index('weak')
        self.assertTrue(.05 < result.Vdata['Letter']['cprob']["['A']"][indexb] < .15)

    def test_lg_mle_estimateparams(self):
        result = self.l.lg_mle_estimateparams(self.skel, self.samplelgseq)
        self.assertTrue(5 < result.Vdata['SAT']['mean_base'] < 15)
        self.assertTrue(5 < result.Vdata['Letter']['variance'] < 15)

    def test_discrete_constraint_estimatestruct(self):
        result = self.l.discrete_constraint_estimatestruct(self.samplediscseq)
        self.assertTrue(["Difficulty", "Grade"] in result.E)

    def test_lg_constraint_estimatestruct(self):
        result = self.l.lg_constraint_estimatestruct(self.samplelgseq)
        self.assertTrue(["Intelligence", "Grade"] in result.E)

    def test_discrete_condind(self):
        # Conditioned on Grade, Difficulty and Letter should test independent.
        chi, pv, witness = self.l.discrete_condind(
            self.samplediscseq, "Difficulty", "Letter", ["Grade"])
        self.assertTrue(pv > .05)
        # assertTrue(witness, ["Grade"]) only checked truthiness (the list
        # was the msg argument); assertEqual performs the intended check.
        self.assertEqual(witness, ["Grade"])
        chia, pva, witnessa = self.l.discrete_condind(
            self.samplediscseq, "Difficulty", "Intelligence", [])
        self.assertTrue(pva < .05)

    def test_discrete_estimatebn(self):
        result = self.l.discrete_estimatebn(self.samplediscseq)
        self.assertTrue(result.V)
        self.assertTrue(result.E)
        self.assertTrue(result.Vdata["Difficulty"]["cprob"][0])

    def test_lg_estimatebn(self):
        result = self.l.lg_estimatebn(self.samplelgseq)
        self.assertTrue(result.V)
        self.assertTrue(result.E)
        self.assertTrue(result.Vdata["Intelligence"]["mean_base"])
# Exercise the linear Gaussian parameter-estimation service end to end.
param_estimate = rospy.ServiceProxy(
    "pgm_learner/linear_gaussian/parameter_estimation",
    LinearGaussianParameterEstimation)

req = LinearGaussianParameterEstimationRequest()

graph_path = os.path.join(PKG_PATH, "test", "graph-test.txt")
teacher_path = os.path.join(PKG_PATH, "test", "graph-lg-test.txt")

# Describe the known graph structure in the request.
skeleton = GraphSkeleton()
skeleton.load(graph_path)
req.graph.nodes = skeleton.V
req.graph.edges = [GraphEdge(parent, child) for parent, child in skeleton.E]
skeleton.toporder()

# Sample trial data from the teacher network.
teacher_nd = NodeData()
teacher_nd.load(teacher_path)
teacher_bn = LGBayesianNetwork(skeleton, teacher_nd)
for sample in teacher_bn.randomsample(200):
    graph_state = LinearGaussianGraphState()
    for name, value in sample.items():
        graph_state.node_states.append(
            LinearGaussianNodeState(node=name, state=value))
    req.states.append(graph_state)

PP.pprint(param_estimate(req).nodes)
# Generate a sequence of samples from a linear Gaussian-CPD Bayesian network

# load node data, then the graph skeleton
nd = NodeData()
nd.load("../tests/unittestlgdict.txt")
skel = GraphSkeleton()
skel.load("../tests/unittestdict.txt")

# topologically order the skeleton so parents precede their children
skel.toporder()

# build the Bayesian network and draw ten samples from it
lgbn = LGBayesianNetwork(skel, nd)
result = lgbn.randomsample(10)

# output - toggle comment to see
#print json.dumps(result, indent=2)

# (3) ----------------------------------------------------------------------
# Generate a sequence of samples from a hybrid (any CPD type) Bayesian network.

# load node data, then the graph skeleton
nd = NodeData()
nd.load("../tests/unittesthdict.txt")
skel = GraphSkeleton()
skel.load("../tests/unittestdict.txt")

# topologically order the skeleton
skel.toporder()
# Generate a sequence of samples from a linear Gaussian-CPD Bayesian network

# load node data, then the graph skeleton
nd = NodeData()
nd.load("../tests/unittestlgdict.txt")
skel = GraphSkeleton()
skel.load("../tests/unittestdict.txt")

# topologically order the skeleton so parents precede their children
skel.toporder()

# build the Bayesian network and draw ten samples from it
lgbn = LGBayesianNetwork(skel, nd)
result = lgbn.randomsample(10)

# output - toggle comment to see
#print json.dumps(result, indent=2)

# (3) ----------------------------------------------------------------------
# Generate a sequence of samples from a hybrid (any CPD type) Bayesian network.

# load node data, then the graph skeleton
nd = NodeData()
nd.load("../tests/unittesthdict.txt")
skel = GraphSkeleton()
skel.load("../tests/unittestdict.txt")

# topologically order the skeleton
skel.toporder()
if __name__ == '__main__':
    # Sample from a teacher network and run the results through the
    # linear Gaussian parameter-estimation service.
    rospy.init_node("pgm_learner_sample_linear_gaussian")

    param_estimate = rospy.ServiceProxy(
        "pgm_learner/linear_gaussian/parameter_estimation",
        LinearGaussianParameterEstimation)

    req = LinearGaussianParameterEstimationRequest()

    graph_path = os.path.join(PKG_PATH, "test", "graph-test.txt")
    teacher_path = os.path.join(PKG_PATH, "test", "graph-lg-test.txt")

    # Describe the known graph structure in the request.
    skeleton = GraphSkeleton()
    skeleton.load(graph_path)
    req.graph.nodes = skeleton.V
    req.graph.edges = [GraphEdge(parent, child) for parent, child in skeleton.E]
    skeleton.toporder()

    # Sample trial data from the teacher network.
    teacher_nd = NodeData()
    teacher_nd.load(teacher_path)
    teacher_bn = LGBayesianNetwork(skeleton, teacher_nd)
    for sample in teacher_bn.randomsample(200):
        graph_state = LinearGaussianGraphState()
        for name, value in sample.items():
            graph_state.node_states.append(
                LinearGaussianNodeState(node=name, state=value))
        req.states.append(graph_state)

    PP.pprint(param_estimate(req).nodes)
# Sample a linear Gaussian network and stream the samples into a CSV file.
logging.basicConfig(format='[%(asctime)s] - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger('network_sampling.log')
logger.setLevel(console_log_level)

# load node data and graph skeleton from the same network description file
nd = NodeData()
nd.load(input_network)
skel = GraphSkeleton()
skel.load(input_network)
skel.toporder()

# build the Bayesian network and draw n_size samples
lgbn = LGBayesianNetwork(skel, nd)
samples = lgbn.randomsample(n_size)

# write out csv, one row per sample; columns come from the first sample
with open(output_csv, 'w') as csvfile:
    writer = csv.DictWriter(csvfile, fieldnames=samples[0].keys())
    writer.writeheader()
    write_begin = datetime.datetime.now()
    logger.debug(msg="write samples to file: {0:s}.".format(output_csv))
    for cnt, sample in enumerate(samples, 1):
        writer.writerow(sample)
        if cnt % 100 == 0:
            # progress heartbeat every 100 rows
            logger.debug(msg="write out {0:d}/{1:d} sample to csv file.".format(cnt, n_size))
    logger.debug(msg='write out done in {0:f} sec!'.format((datetime.datetime.now() - write_begin).total_seconds()))
import json

from libpgm.nodedata import NodeData
from libpgm.graphskeleton import GraphSkeleton
from libpgm.lgbayesiannetwork import LGBayesianNetwork
from libpgm.pgmlearner import PGMLearner

# Generate some data to use: sample from a known linear Gaussian network.
# The same input file carries both the CPD parameters and the structure.
nd = NodeData()
nd.load("gaussGrades.txt")    # an input file
skel = GraphSkeleton()
skel.load("gaussGrades.txt")
skel.toporder()               # sampling requires parents before children
lgbn = LGBayesianNetwork(skel, nd)
data = lgbn.randomsample(8000)
# print() form is valid on both Python 2 and 3; the original bare
# `print data` statement is a SyntaxError on Python 3.
print(data)

# instantiate my learner
learner = PGMLearner()

# estimate structure from the samples alone
result = learner.lg_constraint_estimatestruct(data)

# output the recovered edge list
print(json.dumps(result.E, indent=2))