from gPy.Examples import minibn, asia
from gPy.Models import FR, BN
from gPy.Parameters import Factor, CPT
from gPy.Variables import Domain
from random import choice, randrange, uniform, shuffle
import operator, unittest, pickle

# Deterministic XOR network: uniform priors on 'a' and 'b', and c = a XOR b.
xor = BN(domain=Domain(),
         new_domain_variables={
             'a': [0, 1],
             'b': [0, 1],
             'c': [0, 1]
         })
xor.add_cpts([
    CPT(Factor(variables=['a'], data=[0.5, 0.5]), child='a'),
    CPT(Factor(variables=['b'], data=[0.5, 0.5]), child='b'),
    # P(c|a,b): probability 1 on c == (a XOR b), 0 otherwise.
    CPT(Factor(variables=['c', 'a', 'b'],
               data=[1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0]),
        child='c')
])

cbn_small_names = ['xor', 'minibn', 'asia']
# minibn and asia come from gPy.Examples (imported above; the original
# chunk used them without importing them, raising NameError).
cbn_small_test_cases = [xor, minibn, asia]
cbn_large_names = ['alarm', 'insurance', 'carpo']


def _load_bn(name):
    """Load one pickled Bayes net from the networks/ directory.

    Opens in binary mode: pickle data is bytes, and text mode ('r')
    corrupts it on some platforms.  The handle is always closed.
    """
    f = open('networks/' + name + '_bn.pck', 'rb')
    try:
        return pickle.load(f)
    finally:
        f.close()


try:
    # Load the pickled large Bayes nets; if any file is missing or
    # unreadable, fall back to running only the small test cases
    # rather than aborting the whole module.
    cbn_large_test_cases = [_load_bn(name) for name in cbn_large_names]
except (IOError, OSError, EOFError, pickle.UnpicklingError):
    cbn_large_names = []
    cbn_large_test_cases = []
def disp(fn, samples):
    """Write the sample counts for every joint instantiation to file fn.

    Emits a header row of variable names followed by 'count', then one
    row per instantiation: the variable values followed by that
    instantiation's count from the factor built over all variables.
    """
    f = open(fn, 'w')
    fact = samples.makeFactor(samples.variables())
    # Header row (Python 2 print statement; trailing comma suppresses newline).
    for var in fact.variables():
        print >>f, var,
    print >>f, 'count'
    # One data row per joint instantiation.
    for inst in fact.insts():
        for i in inst:
            print >>f, i,
        print >>f, fact[inst]
    f.close()

# Two binary variables where 'b' depends on 'a' (distinct rows in P(b|a)).
bn0 = BN(domain=Domain(), new_domain_variables={'a': [0,1], 'b':[0,1]})
bn0.add_cpts([CPT(Factor(variables=['a'], data=[0.5, 0.5]),child='a')
              ,CPT(Factor(variables=['a','b'], data=[0.3, 0.7, 0.4, 0.6]),child='b')
              ])
w = CausalWorld(bn0)
samples = w.observe(10000)
disp('two_depend', samples)

# Two binary variables that are independent ('b' has no parents).
bn1 = BN(domain=Domain(), new_domain_variables={'a': [0,1], 'b':[0,1]})
bn1.add_cpts([CPT(Factor(variables=['a'], data=[0.5, 0.5]),child='a')
              ,CPT(Factor(variables=['b'], data=[0.3, 0.7]),child='b')
              ])
w = CausalWorld(bn1)
samples = w.observe(10000)
disp('two_independ', samples)

# Two three-valued variables; 'a' is uniform.
bn2 = BN(domain=Domain(), new_domain_variables={'a': [0,1,2], 'b':[0,1,2]})
bn2.add_cpts([CPT(Factor(variables=['a'], data=[1.0/3.0, 1.0/3.0, 1.0/3.0]),child='a')
# NOTE(review): the visible source is truncated here — the add_cpts call
# for bn2 is never closed in this chunk; the remainder presumably
# continues in a part of the file not shown.
def disp(fn, samples):
    """Write the sample counts for every joint instantiation to file fn.

    Emits a header row of variable names followed by 'count', then one
    row per instantiation: the variable values followed by that
    instantiation's count from the factor built over all variables.
    """
    f = open(fn, 'w')
    fact = samples.makeFactor(samples.variables())
    # Header row (Python 2 print statement; trailing comma suppresses newline).
    for var in fact.variables():
        print >> f, var,
    print >> f, 'count'
    # One data row per joint instantiation.
    for inst in fact.insts():
        for i in inst:
            print >> f, i,
        print >> f, fact[inst]
    f.close()

# Two binary variables where 'b' depends on 'a' (distinct rows in P(b|a)).
bn0 = BN(domain=Domain(), new_domain_variables={'a': [0, 1], 'b': [0, 1]})
bn0.add_cpts([
    CPT(Factor(variables=['a'], data=[0.5, 0.5]), child='a'),
    CPT(Factor(variables=['a', 'b'], data=[0.3, 0.7, 0.4, 0.6]), child='b')
])
w = CausalWorld(bn0)
samples = w.observe(10000)
disp('two_depend', samples)

# Two binary variables that are independent ('b' has no parents).
bn1 = BN(domain=Domain(), new_domain_variables={'a': [0, 1], 'b': [0, 1]})
bn1.add_cpts([
    CPT(Factor(variables=['a'], data=[0.5, 0.5]), child='a'),
    CPT(Factor(variables=['b'], data=[0.3, 0.7]), child='b')
])
w = CausalWorld(bn1)
samples = w.observe(10000)
disp('two_independ', samples)

# NOTE(review): the visible source is truncated here — the bn2 assignment
# below never completes in this chunk.
bn2 = BN(domain=Domain(),
from gPy.Examples import minibn, asia
from gPy.Models import FR, BN
from gPy.Parameters import Factor, CPT
from gPy.Variables import Domain
from random import choice, randrange, uniform, shuffle
import operator, unittest, pickle

# Deterministic XOR network: uniform priors on 'a' and 'b', and c = a XOR b.
xor = BN(domain=Domain(),
         new_domain_variables={'a': [0, 1], 'b': [0, 1], 'c': [0, 1]})
xor.add_cpts([
    CPT(Factor(variables=['a'], data=[0.5, 0.5]), child='a'),
    CPT(Factor(variables=['b'], data=[0.5, 0.5]), child='b'),
    # P(c|a,b): probability 1 on c == (a XOR b), 0 otherwise.
    CPT(Factor(variables=['c', 'a', 'b'],
               data=[1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0]),
        child='c')
])

cbn_small_names = ['xor', 'minibn', 'asia']
cbn_small_test_cases = [xor, minibn, asia]
cbn_large_names = ['alarm', 'insurance', 'carpo']


def _load_bn(name):
    """Load one pickled Bayes net from the networks/ directory.

    Opens in binary mode: pickle data is bytes, and text mode ('r')
    corrupts it on some platforms.  The handle is always closed.
    """
    f = open('networks/' + name + '_bn.pck', 'rb')
    try:
        return pickle.load(f)
    finally:
        f.close()


try:
    # Load the pickled large Bayes nets; if any file is missing or
    # unreadable, fall back to running only the small test cases
    # rather than aborting the whole module.
    cbn_large_test_cases = [_load_bn(name) for name in cbn_large_names]
except (IOError, OSError, EOFError, pickle.UnpicklingError):
    cbn_large_names = []
    cbn_large_test_cases = []

cbn_test_cases = cbn_small_test_cases + cbn_large_test_cases


def distribution_of(model):
    """Returns a normalised factor representing the joint
    instantiation of the model.
    """