def bayesianOp(self, workspace, effects, cause, show=False):
    """Load the Bayes net stored under *workspace* and print P(cause | effects).

    workspace -- workspace name, appended to self.ws to form the path
    effects   -- evidence node(s)
    cause     -- query node
    show      -- when True, also draw the loaded network
    """
    self.path_workspace = self.ws + workspace
    net = self.loadGraph(self.path_workspace)
    bayes = bn.BayesNet(net)
    # Only report when the workspace directory actually exists on disk.
    if(os.path.isdir(self.path_workspace)):
        print("P(" + str(cause) + "|" + str(effects) + "): " + str(bayes.bayes_calc(cause, effects)))
    if(show):
        net.draw_network()
def bayesianOp_Random(self, workspace, show=False):
    """Pick a random (cause, effects) pair from the workspace's net, compute
    P(cause | effects), and return (prob, cause, effects)."""
    self.path_workspace = self.ws + workspace
    net = self.loadGraph(self.path_workspace)
    cause, effects = net.random_node_pair()
    bayes = bn.BayesNet(net)
    if(os.path.isdir(self.path_workspace)):
        prob = bayes.bayes_calc(cause, effects)
        # Only report pairs whose probability is above the noise floor.
        if (prob > ZERO_PROB):
            print("P(" + str(cause) + "|" + str(effects) + "): " + str(prob))
        if(show):
            net.draw_network()
    # NOTE(review): 'prob' is unbound when the workspace directory does not
    # exist, so this return would raise NameError in that case -- confirm
    # callers always pass an existing workspace.
    return (prob, cause, effects)
def load_ontologia(self, demo, _from, to, init=False):
    """Load an ontology file into self.net and create its workspace directory.

    demo  -- relative path of the ontology file under ../ontologie/
    _from, to -- range/selector forwarded to Net.load_graph
    init  -- when True, initialize the net's probability tables before saving
    """
    # warning: despite the parameter name, this loads the real (NON-demo) ontology
    print(demo)
    self.net = nt.Net()
    self.net.load_graph("../ontologie/" + demo, _from, to)
    if(init):
        # Fresh load: give every node an initial probability table.
        bayes = bn.BayesNet(self.net)
        bayes.inizialize_probability()
    # Workspace name = last path component of the ontology file.
    ws_name = demo.split('/')
    ws_name = ws_name[len(ws_name)-1]
    self.path_workspace = self.ws + ws_name
    # Strip the file extension; assumes exactly one '.' besides the leading
    # one in the combined path -- TODO confirm for all workspace names.
    self.path_workspace = '.' + self.path_workspace.split('.')[1]
    if(os.path.isdir(self.path_workspace)):
        print(" Workspace gia' esistente ")
    else:
        # New workspace: create the directory and persist the loaded graph.
        os.mkdir(self.path_workspace)
        self.dumpGraph()
def kFold(data, k=10, structure=None, verbose=True):
    '''Carries out k-fold cross-validation of a BayesNet classifier.

    data      -- pandas DataFrame whose last column 'Class' holds the label;
                 a temporary 'fold' column is added to it (side effect kept
                 from the original implementation)
    k         -- number of folds (default 10)
    structure -- network structure spec passed to BayesNet; defaults to
                 [0, 0, 0, 0, 0, 0] (None sentinel avoids the shared
                 mutable-default-argument pitfall)
    verbose   -- print per-fold and overall accuracy
    Returns the mean accuracy across folds.
    '''
    if structure is None:
        structure = [0, 0, 0, 0, 0, 0]
    # Integer fold size: np.repeat requires an integer 'repeats' (the
    # original len(data) / k is a float under Python 3 and raises).
    foldSize = len(data) // k
    x = np.repeat(list(range(k)), repeats=foldSize)
    data['fold'] = pd.Series(x)
    accuracyList = []
    for fold in range(k):
        # Copy-free drop on the selected rows would emit
        # SettingWithCopyWarning with inplace=True; assign the result instead.
        train = data[data['fold'] != fold].drop('fold', axis=1)
        test = data[data['fold'] == fold].drop('fold', axis=1)
        net = BayesNet(4, structure)
        net.initGraph()
        net.compCPT(train)
        errors = 0
        # Testing: count misclassified rows in this fold.
        for i in range(len(test)):
            y = test.iloc[i:(i + 1)]
            out = net.predict(y)
            if out != test.iloc[i]['Class']:
                errors += 1
        acc = float(foldSize - errors) / foldSize
        accuracyList.append(acc)
        if verbose:
            print("Fold :%d Accuracy : %f" % (fold, acc))
    if verbose:
        print("Overall CV accuracy : %f" % (np.mean(accuracyList)))
    return (np.mean(accuracyList))
def performAnalysis(student, predictedGrades):
    """For each upper-year course, build that course's Bayes net and infer
    the student's probability of passing it and of enjoying it.

    student         -- profile object exposing numberOfCourses,
                       majorAverages and interests (parallel lists)
    predictedGrades -- predicted grade per course, ordered like
                       config.upperyearcourses
    Returns (finalgrade_probabilities, enjoyability_probabilities), two
    lists parallel to config.upperyearcourses.
    """
    course_counts = student.numberOfCourses
    major_avgs = student.majorAverages
    interest_levels = student.interests
    pass_probs = []
    fun_probs = []
    for i, course in enumerate(config.upperyearcourses):
        # Build the per-course network from this student's evidence.
        net = BayesNet.generateBayesianNetwork(
            predictedGrades[i], major_avgs[i], interest_levels[i], course)
        # Run inference with the observed evidence fixed.
        predictions = net.predict_proba(
            {
                'Predicted Course Grade': 'Pass Course',
                'Number Of Major Courses': course_counts[i],
                'Major Average': 'Pass Major',
                'Interest': 'Interested'
            }, 1)
        # predictions[0] is the final-grade node, predictions[1] the
        # enjoyability node; pull out the marginal of interest from each.
        pass_probs.append(predictions[0].parameters[0]['Pass'])
        fun_probs.append(predictions[1].parameters[0]['Fun'])
    return pass_probs, fun_probs
'''
Created on 2013-6-8

@author: Walter
'''
'''
Case 1: Three parameters to learn
'''
from BayesNet import *
from BayesNode import *

if __name__ == '__main__':
    # Beta(2, 2) priors over the three CPT entries to be learned.
    Net = BayesNet()
    B_T = BetaNode("Burglary:T", alpha=2.0, beta=2.0)
    MA_TT = BetaNode("MaryCalls:T|Alarm:T", alpha=2.0, beta=2.0)
    ABE_TTF = BetaNode("Alarm:T|Burglary:T,Earthquake:F", alpha=2.0, beta=2.0)
    # NOTE(review): the triple-quote below opens a string that never closes
    # in this chunk -- the chunk is truncated here.
    '''
    B = DiscreteVarNode("Burglary")
    E = DiscreteVarNode("Earthquake")
    A = DiscreteVarNode("Alarm", 'Burglary Earthquake')
    J = DiscreteVarNode("JohnCalls", 'Alarm')
    M = DiscreteVarNode("MaryCalls", 'Alarm')
    B[True] = "Burglary:T"
    E[True] = 0.3
    A[True,True,True] = 0.95
'''
Created on 2013-6-10

@author: Walter
'''
from BayesNet import *
from BayesNode import *

if __name__ == '__main__':
    # Burglary/earthquake alarm network built from discrete CPT nodes.
    Net = BayesNet()
    B = DiscreteVarNode("Burglary")
    E = DiscreteVarNode("Earthquake")
    A = DiscreteVarNode("Alarm", 'Burglary Earthquake')
    J = DiscreteVarNode("JohnCalls", 'Alarm')
    M = DiscreteVarNode("MaryCalls", 'Alarm')
    # CPT entries: first index is the node's own value, the remaining
    # indices follow the parent order declared above.
    B[True] = 0.2
    E[True] = 0.3
    A[True,True,True] = 0.95
    A[True,True,False] = 0.8
    A[True,False,True] = 0.9
    A[True,False,False] = 0.2
    J[True,True] = 0.7
    J[True,False] = 0.2
    M[True,True] = 0.4
    M[True,False] = 0.6
    Net.addNode(B)
    Net.addNode(E)
    # (chunk truncated here)
'''
Created on 2013-6-13

@author: Walter
'''
from BayesNet import *
from BayesNode import *

if __name__ == '__main__':
    # Tiny A -> B -> C chain used as a sampling test.
    Net = BayesNet()
    A = DiscreteVarNode("A")
    B = DiscreteVarNode("B", "A")
    C = DiscreteVarNode("C", "B")
    A[True] = 0.17
    B[True, True] = 0.3
    B[True, False] = 0.4
    C[True, True] = 0.5
    C[True, False] = 0.7
    Net.addNode(A)
    Net.addNode(B)
    Net.addNode(C)
    Net.init()
    # Sampling parameters; presumably sampleFrom is a burn-in cutoff --
    # TODO confirm, their use lies past this truncation point.
    sampleNum = 50000
    sampleFrom = 1000
'''
Created on May 28, 2013

@author: walter
'''
if __name__ == '__main__':
    import numpy as np
    from BayesNet import *
    # Burglary/earthquake alarm network with 'T'/'F'-keyed Bernoulli nodes.
    Net = BayesNet()
    B = BernoulliNode("Burglary")
    E = BernoulliNode("Earthquake")
    A = BernoulliNode("Alarm", 'Burglary Earthquake')
    J = BernoulliNode("JohnCalls", 'Alarm')
    M = BernoulliNode("MaryCalls", 'Alarm')
    # CPT rows: first index is the node's own value, the rest follow the
    # parent order declared above.
    B['T'] = 0.001
    E['T'] = 0.002
    A['T', 'T', 'T'] = 0.95
    A['T', 'T', 'F'] = 0.94
    A['T', 'F', 'T'] = 0.29
    A['T', 'F', 'F'] = 0.001
    J['T', 'T'] = 0.90
    J['T', 'F'] = 0.05
    M['T', 'T'] = 0.70
    M['T', 'F'] = 0.01
    Net.addNode(B)
    Net.addNode(E)
    # (chunk truncated here)
# NOTE(review): this chunk begins mid-script -- the assignments below appear
# to run inside a loop over parsed input lines ('elements', 'data'); the
# loop header and earlier setup are outside this chunk.
data["Golfmen"] = elements[0]
data["Score"] = float(elements[1])
data["Tournament"] = elements[2]
# Track the distinct golfers and tournaments encountered so far.
if (elements[0] in golfmen) == False:
    golfmen.append(elements[0])
if (elements[2] in tournaments) == False:
    tournaments.append(elements[2])
golfData.append(data)
print "GolfData:{}".format(len(golfData))
print "Golfmen:{}".format(len(golfmen))
print "Tournamenets:{}".format(len(tournaments))

def addFunc(a, b):
    # Simple link function: sum of two parent values.
    return a + b

# Hierarchical golf-scores model: hyper-priors over golfer variance and
# tournament mean/variance, plus an observation-noise prior.
Net = BayesNet()
HyperGolferVar = InvGammaNode(var="Hypergolfervar", alpha=18, beta=0.015)
HyperTournMean = NormalNode(var="Hypertournmean", mean=72, variance=2)
HyperTournVar = InvGammaNode(var="Hypertournvar", alpha=18, beta=0.015)
Obsvar = InvGammaNode(var="Obsvar", alpha=83, beta=0.0014)
Net.addNode(HyperGolferVar)
Net.addNode(HyperTournMean)
Net.addNode(HyperTournVar)
Net.addNode(Obsvar)
tournamentNode = []
idx = 0
# One normal node per tournament, centred on the shared hyper-mean.
# (chunk truncated mid-call below)
for tn in tournaments:
    tournamentNode.append(
        NormalNode(var=tn, mean="Hypertournmean",
'''
Created on 2013-6-9

@author: Walter
'''
'''
Case 5: All parameters to learn with missing data
'''
from BayesNet import *
from BayesNode import *

if __name__ == '__main__':
    Net = BayesNet()
    # Beta(2, 2) priors over every CPT entry of the burglary network; the
    # node names encode which conditional entry each prior governs.
    B_T = BetaNode("Burglary:T", alpha=2.0, beta=2.0)
    E_T = BetaNode("Earthquake:T", alpha=2.0, beta=2.0)
    ABE_TTT = BetaNode("Alarm:T|Burglary:T,Earthquake:T", alpha=2.0, beta=2.0)
    ABE_TTF = BetaNode("Alarm:T|Burglary:T,Earthquake:F", alpha=2.0, beta=2.0)
    ABE_TFT = BetaNode("Alarm:T|Burglary:F,Earthquake:T", alpha=2.0, beta=2.0)
    ABE_TFF = BetaNode("Alarm:T|Burglary:F,Earthquake:F", alpha=2.0, beta=2.0)
    JA_TT = BetaNode("JohnCalls:T|Alarm:T", alpha=2.0, beta=2.0)
    JA_TF = BetaNode("JohnCalls:T|Alarm:F", alpha=2.0, beta=2.0)
    MA_TT = BetaNode("MaryCalls:T|Alarm:T", alpha=2.0, beta=2.0)
    MA_TF = BetaNode("MaryCalls:T|Alarm:F", alpha=2.0, beta=2.0)
    # Discrete network whose CPTs will be tied to the priors above.
    B = DiscreteVarNode("Burglary")
    E = DiscreteVarNode("Earthquake")
    A = DiscreteVarNode("Alarm", 'Burglary Earthquake')
    # (chunk truncated here)
def TAN(data_set, metric='', debug=True):
    """Learn a Tree-Augmented Naive Bayes (TAN) network from *data_set*.

    The last column of data_set is treated as the class. Pairwise
    conditional mutual information between attributes is computed, a
    maximum-weight spanning tree is built over the attributes, directions
    are assigned from an arbitrary root, and finally the class is added as
    a parent ('root') of every attribute node. Returns the learned net.
    """
    Learning.log('TAN: Learning bayes net started.', debug)
    data_set_1 = np.array(data_set)
    # Split off the class column; the remaining columns are the attributes.
    class_values = data_set_1[:, len(data_set[0]) - 1]
    data_set = data_set_1[:, :len(data_set[0]) - 1]
    bayes_net = BayesNet(data_set)
    possible_class_values = []
    for x in class_values:
        if x not in possible_class_values:
            possible_class_values.append(x)
    n = bayes_net.get_data_set_rows_number(data_set)
    weights = {}
    l = float(len(class_values))
    Learning.log('TAN: Mutual information calculating in progress.', debug)
    for node_i in bayes_net.nodes():
        weights[node_i] = {}
        for node_j in bayes_net.nodes():
            already_included = False
            # Mutual information is symmetric: reuse the mirrored value
            # if the (j, i) pair was already computed.
            if node_j in weights.keys():
                if node_i in weights[node_j].keys():
                    weights[node_i][node_j] = weights[node_j][node_i]
                    already_included = True
            if node_i != node_j and not already_included:
                values_i = list(bayes_net.net[node_i]['possible_values'])
                values_j = list(bayes_net.net[node_j]['possible_values'])
                mutual_information = 0
                xxx = 0
                for k in range(0, len(possible_class_values)):
                    for i in range(0, len(values_i)):
                        for j in range(0, len(values_j)):
                            # Marginal counts of each value and of the class.
                            count_i = float(bayes_net.data[node_i].count(
                                values_i[i]))
                            count_j = float(bayes_net.data[node_j].count(
                                values_j[j]))
                            count_k = float(
                                np.count_nonzero(class_values ==
                                                 possible_class_values[k]))
                            count_x = 0.0
                            count_z = 0.0
                            # count_z: rows with node_i == v_i and class == k;
                            # count_x: rows with node_i == v_i and node_j == v_j.
                            # NOTE(review): this nesting is reconstructed from a
                            # flattened source -- confirm count_x's condition.
                            for index in range(
                                    0, len(bayes_net.data[node_i])):
                                if bayes_net.data[node_i][
                                        index] == values_i[i]:
                                    if class_values[
                                            index] == possible_class_values[
                                                k]:
                                        count_z += 1.0
                                    if bayes_net.data[node_j][
                                            index] == values_j[j]:
                                        count_x += 1.0
                            count_y = 0.0
                            # count_y: rows with node_j == v_j and class == k.
                            for index in range(
                                    0, len(bayes_net.data[node_j])):
                                if bayes_net.data[node_j][
                                        index] == values_j[j]:
                                    if class_values[
                                            index] == possible_class_values[
                                                k]:
                                        count_y += 1.0
                            Pi = float(count_i / l)
                            Pj = float(count_j / l)
                            Pk = float(count_k / l)
                            Pijk = Pi * Pj * Pk
                            Px = float(count_x / count_k)
                            Pz = float(count_z / count_k)
                            Py = float(count_y / count_k)
                            # Skip zero terms to avoid log(0) / division by zero.
                            if Pz != 0.0 and Py != 0 and Px != 0:
                                mutual_information = mutual_information + Pijk * log(
                                    float(Px / (Pz * Py)))
                weights[node_i][node_j] = mutual_information
                Learning.log(
                    'TAN: Mutual information for nodes: ' + str(node_i) +
                    " " + str(node_j) + ' : ' +
                    str(weights[node_i][node_j]), debug)
    Learning.log('TAN: Mutual informations calculating done.', debug)
    print weights
    edges = {}
    # Upper bound on undirected edges in a complete graph over the nodes.
    possible_edges_num = len(
        bayes_net.net.keys()) * (len(bayes_net.net.keys()) - 1) / 2
    Learning.log('TAN: Tree building in progress.', debug)
    # Greedy maximum-weight spanning-tree construction: repeatedly pick the
    # heaviest remaining pair that does not already have an edge, skipping
    # pairs that were rejected before (not_to_be_checked).
    while possible_edges_num > 0:
        causing_cycle = False
        check_next = True
        i = 0
        create_edge = False
        max_weight = -100000
        vertex_1 = None
        vertex_2 = None
        not_to_be_checked = {}
        while check_next:
            max_weight = -100000
            for node_i in bayes_net.nodes():
                for node_j in bayes_net.nodes():
                    to_pass = False
                    if node_i not in not_to_be_checked.keys():
                        if node_j not in not_to_be_checked.keys():
                            to_pass = True
                        elif node_i not in not_to_be_checked[node_j]:
                            to_pass = True
                    elif node_j not in not_to_be_checked[node_i]:
                        to_pass = True
                    if to_pass:
                        if (node_i not in edges.keys() or node_j not in edges.keys()
                                or node_j not in edges[node_i].keys()
                                or node_i not in edges[node_j].keys())\
                                and node_i != node_j:
                            if weights[node_i][node_j] > max_weight:
                                max_weight = weights[node_i][node_j]
                                vertex_1 = node_i
                                vertex_2 = node_j
            # NOTE(review): despite the name, the True branch below CREATES
            # the edge -- confirm check_cycles_no_directions' return contract.
            causing_cycle = bayes_net.check_cycles_no_directions(
                edges, vertex_1, vertex_2)
            if causing_cycle:
                create_edge = True
                causing_cycle = False
                check_next = False
                # possible_edges_num -= 1
                # break
            else:
                # Rejected pair: remember it so it is not reconsidered.
                i += 1
                if vertex_1 not in not_to_be_checked.keys():
                    not_to_be_checked[vertex_1] = []
                if vertex_2 not in not_to_be_checked.keys():
                    not_to_be_checked[vertex_2] = []
                not_to_be_checked[vertex_1].append(vertex_2)
                not_to_be_checked[vertex_2].append(vertex_1)
                if i == possible_edges_num:
                    check_next = False
                    i = 0
        if create_edge:
            edges = bayes_net.add_edge_no_directions(
                vertex_1, vertex_2, edges, max_weight)
            possible_edges_num -= 1
    print edges
    Learning.log('TAN: Tree building in progress - adding directions.', debug)
    # BFS from an arbitrary root, directing every undirected tree edge
    # away from the root as it is visited.
    root = bayes_net.net.keys()[0]
    possible = []
    l = len(edges.keys())
    while l > 0:
        copy_dict = copy.deepcopy(edges)
        if root in copy_dict.keys():
            for child in copy_dict[root].keys():
                bayes_net.add_edge(root, child)
                del edges[root][child]
                del edges[child][root]
                possible.append(child)
        if len(possible) > 0:
            root = possible[0]
            del possible[0]
        l = len(possible)
    Learning.log('TAN: Tree building done.', debug)
    Learning.log(
        'TAN: Adding class as a root and linking it to all other nodes.',
        debug)
    # The class variable becomes a synthetic 'root' node and a parent of
    # every attribute node.
    for node in bayes_net.nodes():
        bayes_net.net[node]['parents'].append('root')
    bayes_net.vertexes = bayes_net.vertexes[:-1]
    bayes_net.vertexes.append('root')
    root_children = bayes_net.net.keys()
    bayes_net.net['root'] = {}
    bayes_net.net['root']['possible_values'] = possible_class_values
    bayes_net.net['root']['children'] = root_children
    bayes_net.net['root']['parents'] = []
    bayes_net.net['root']['probabilities'] = []
    for node in bayes_net.net.keys():
        print "node: ", node, " parents: ", bayes_net.net[node][
            'parents'], " children: ", bayes_net.net[node]['children']
    Learning.log('TAN: Creating TAN tree done.', debug)
    score = bayes_net.score(data_set, metric)
    Learning.log('TAN: Score: ' + str(score), debug)
    return bayes_net
'''
Created on Jun 2, 2013

@author: walter
'''
from BayesNet import *
from BayesNode import *
import math

if __name__ == '__main__':
    Net = BayesNet()

    def plus(x, y):
        # Link function: D's mean is the sum of its parents A and B.
        return x + y

    # Linear-Gaussian chain: C depends on A; D on A + B; E and Q on D
    # (Q with very small variance, effectively a near-deterministic copy).
    A = NormalNode(var="A", mean=0.0, variance=123.0)
    B = NormalNode(var="B", mean=0.0, variance=145.0)
    C = NormalNode(var="C", mean="A", variance=167.0)
    D = NormalNode(var="D", mean=["A", "B"], variance=189.0, meanFunc=plus)
    E = NormalNode(var="E", mean="D", variance=101.0)
    Q = NormalNode(var="Q", mean="D", variance=0.01)
    Net.addNode(A)
    Net.addNode(B)
    Net.addNode(C)
    Net.addNode(D)
    Net.addNode(E)
    Net.addNode(Q)
    # (chunk truncated here)
'''
Created on 2013-6-10

@author: Walter
'''
from BayesNet import *
from BayesNode import *

if __name__ == '__main__':
    Net = BayesNet()
    # Beta(2, 2) priors over every CPT entry of the burglary network.
    B_T = BetaNode("Burglary:T", alpha=2.0, beta=2.0)
    E_T = BetaNode("Earthquake:T", alpha=2.0, beta=2.0)
    ABE_TTT = BetaNode("Alarm:T|Burglary:T,Earthquake:T", alpha=2.0, beta=2.0)
    ABE_TTF = BetaNode("Alarm:T|Burglary:T,Earthquake:F", alpha=2.0, beta=2.0)
    ABE_TFT = BetaNode("Alarm:T|Burglary:F,Earthquake:T", alpha=2.0, beta=2.0)
    ABE_TFF = BetaNode("Alarm:T|Burglary:F,Earthquake:F", alpha=2.0, beta=2.0)
    JA_TT = BetaNode("JohnCalls:T|Alarm:T", alpha=2.0, beta=2.0)
    JA_TF = BetaNode("JohnCalls:T|Alarm:F", alpha=2.0, beta=2.0)
    MA_TT = BetaNode("MaryCalls:T|Alarm:T", alpha=2.0, beta=2.0)
    MA_TF = BetaNode("MaryCalls:T|Alarm:F", alpha=2.0, beta=2.0)
    # NOTE(review): the triple-quote below opens a string that never closes
    # in this chunk -- the chunk is truncated here.
    '''
    B = DiscreteVarNode("Burglary")
    E = DiscreteVarNode("Earthquake")
    A = DiscreteVarNode("Alarm", 'Burglary Earthquake')
    J = DiscreteVarNode("JohnCalls", 'Alarm')
    M = DiscreteVarNode("MaryCalls", 'Alarm')
    B[True] = "Burglary:T"
'''
Created on 2013-6-8

@author: Walter
'''
'''
Case 4: All parameters to learn with hyper hyper parameters
'''
from BayesNet import *
from BayesNode import *

if __name__ == '__main__':
    Net = BayesNet()
    # Hyper-hyper priors: the Beta parameters of two CPT entries are
    # themselves random (inverse-gamma distributed) and referenced by name.
    HAB_T = InvGammaNode("HyperAlphaOf Burglary:T", alpha=2.0, beta=2.0)
    HBABE_TFT = InvGammaNode("HyperBetaOf Alarm:T|Burglary:F,Earthquake:T",
                             alpha=2.0, beta=2.0)
    B_T = BetaNode("Burglary:T", alpha="HyperAlphaOf Burglary:T", beta=2.0)
    E_T = BetaNode("Earthquake:T", alpha=2.0, beta=2.0)
    ABE_TTT = BetaNode("Alarm:T|Burglary:T,Earthquake:T", alpha=2.0, beta=2.0)
    ABE_TTF = BetaNode("Alarm:T|Burglary:T,Earthquake:F", alpha=2.0, beta=2.0)
    ABE_TFT = BetaNode("Alarm:T|Burglary:F,Earthquake:T", alpha=2.0,
                       beta="HyperBetaOf Alarm:T|Burglary:F,Earthquake:T")
    ABE_TFF = BetaNode("Alarm:T|Burglary:F,Earthquake:F", alpha=2.0, beta=2.0)
    JA_TT = BetaNode("JohnCalls:T|Alarm:T", alpha=2.0, beta=2.0)
    # (chunk truncated here)
'''
Created on 2013-6-5

@author: Walter
'''
from BayesNet import *
from BayesNode import *
import math

if __name__ == '__main__':
    Net = BayesNet()

    # Deterministic link functions used by the continuous nodes below.
    def sinSquareFunc(x):
        return np.sin(x)**2

    def multiplyFunc(x, y):
        return x * y

    def squareFunc(x):
        return x**2

    def addOneFunc(x):
        return x + 1

    A1 = NormalNode(var="A1", mean=10.0, variance=2.0)
    # (chunk truncated mid-call below)
    E1 = GammaNode(var="E1", alpha=["A1"], beta=["B1"],
def hill_climbing_one_loop(data_set, metric='AIC', debug=False):
    """Greedy hill-climbing structure search over a Bayes net.

    For every ordered node pair, tries in turn to add, delete and reverse
    the edge node_j -> node_i, keeping any change that strictly improves
    the score and undoing it otherwise. Stops when a full sweep over all
    pairs yields no improvement.

    data_set -- training data handed to BayesNet and to score()
    metric   -- scoring metric name (default 'AIC')
    debug    -- when True, log each accepted move
    Returns the learned BayesNet.
    """
    bayes_net = BayesNet(data_set)
    score = bayes_net.score(data_set, metric)
    while True:
        # Score at the start of the sweep; if it has not improved by the
        # end, the search has converged.
        max_score = score
        Learning.log('Score: ' + str(score), debug)
        for node_i in bayes_net.nodes():
            for node_j in bayes_net.nodes():
                if node_i != node_j:
                    # Move 1: add node_j -> node_i (if absent and acyclic).
                    if not bayes_net.is_parent(node_j, node_i):
                        if not bayes_net.check_cycle(node_j, node_i):
                            bayes_net.add_edge(node_j, node_i)
                            new_score = bayes_net.score(data_set, metric)
                            if new_score <= score:
                                # No improvement: undo the addition.
                                bayes_net.delete_edge(node_j, node_i)
                            else:
                                score = new_score
                                Learning.log(
                                    'Adding edge ' + str(node_j) + ' -> ' +
                                    str(node_i) + '. New score: ' +
                                    str(score), debug)
                    # Move 2: delete node_j -> node_i (if present).
                    if bayes_net.is_parent(node_j, node_i):
                        bayes_net.delete_edge(node_j, node_i)
                        new_score = bayes_net.score(data_set, metric)
                        if new_score <= score:
                            # No improvement: restore the edge.
                            bayes_net.add_edge(node_j, node_i)
                        else:
                            score = new_score
                            Learning.log(
                                'Deleting edge ' + str(node_j) + ' -> ' +
                                str(node_i) + '. New score: ' + str(score),
                                debug)
                    # Move 3: reverse node_j -> node_i (if still present
                    # after move 2 and the reversal stays acyclic).
                    if bayes_net.is_parent(node_j, node_i):
                        if not bayes_net.check_cycle(node_i, node_j, True):
                            bayes_net.reverse_edge(node_j, node_i)
                            new_score = bayes_net.score(data_set, metric)
                            if new_score <= score:
                                # No improvement: reverse back.
                                bayes_net.reverse_edge(node_i, node_j)
                            else:
                                score = new_score
                                Learning.log(
                                    'Reversing edge ' + str(node_j) +
                                    ' -> ' + str(node_i) + '. New score: ' +
                                    str(score), debug)
        if score <= max_score:
            break
    Learning.log('Learning bayes net ended. Score achieved: ' + str(score),
                 debug)
    return bayes_net
'''
Created on May 29, 2013

@author: walter
'''
from BayesNet import *

if __name__ == '__main__':
    # Seven-node Bernoulli test network with multiple parents per node.
    Net = BayesNet()
    A = BernoulliNode("A")
    B = BernoulliNode("B", 'A')
    C = BernoulliNode("C", 'A B D')
    D = BernoulliNode("D", 'A')
    E = BernoulliNode("E", 'B C D')
    F = BernoulliNode("F", 'A D E')
    G = BernoulliNode("G", 'B E')
    # CPT rows: first index is the node's own value, remaining indices
    # follow the parent order in the declarations above.
    A['T'] = 0.4
    B['T','T'] = 0.39
    B['T','F'] = 0.27
    C['T','T','T','T'] = 0.15
    C['T','T','T','F'] = 0.23
    C['T','T','F','T'] = 0.94
    C['T','T','F','F'] = 0.41
    C['T','F','T','T'] = 0.77
    C['T','F','T','F'] = 0.55
    C['T','F','F','T'] = 0.69
    C['T','F','F','F'] = 0.29
    D['T','T'] = 0.73
    # (chunk truncated here)
'''
Created on 2013-6-10

@author: Walter
'''
from BayesNet import *
from BayesNode import *

if __name__ == '__main__':
    # One observation per faculty member; hierarchical normal model with an
    # inverse-gamma hyper-prior on the variance's beta parameter.
    facultyData = [float(line) for line in open('faculty.dat')]
    Net = BayesNet()
    HVB = InvGammaNode("HyperBetaOfVariance", alpha=10, beta=5)
    Mean = NormalNode("Mean", mean=5.0, variance=1.0 / 9.0)
    Variance = InvGammaNode("Variance", alpha=11.0, beta="HyperBetaOfVariance")
    faculty = []
    facultyObserve = {}
    Net.addNode(HVB)
    Net.addNode(Mean)
    Net.addNode(Variance)
    # One observed normal node per data point, all sharing Mean/Variance.
    for i in range(len(facultyData)):
        name = "F{}".format(i)
        faculty.append(NormalNode(name, mean="Mean", variance="Variance"))
        Net.addNode(faculty[i])
        facultyObserve[name] = facultyData[i]
    # (chunk truncated here)
if __name__ == '__main__':
    # Read daily closing prices from payx.csv (columns: Date, Open, High,
    # Low, Close, Volume).
    closePrice = []
    reader = csv.reader(open("payx.csv"))
    '''
    for Date,Open,High,Low,Close,Volume in reader:
        print Date,Open,High,Low,Close,Volume
    '''
    for Date, Open, High, Low, Close, Volume in reader:
        closePrice.append(Close)
    print len(closePrice)
    #print closePrice
    # Priors for a random-walk price model: drift mu, volatility sigma,
    # and observation noise.
    Net = BayesNet()
    mu = InvGammaNode(var="mu", alpha=3.0, beta=0.5)
    sigma = GammaNode(var="sigma", alpha=2.0, beta=2.0)
    obsVar = GammaNode(var="obsVar", alpha=1.0, beta=2.0)

    def meanFunc1(lstS, mu):
        # Next-step mean grows the last price by factor (1 + mu).
        # NOTE(review): debug prints left in by the author.
        print mu
        print lstS
        return (1 + mu) * lstS

    def varFunc1(lstS, sigma):
        # Next-step variance scales with the last price.
        return sigma * lstS

    # Model a window of 20 time steps.
    length = 20
    S = []
    # (chunk truncated here)
'''
Created on May 25, 2013

@author: walter
'''
from BayesNet import *

if __name__ == '__main__':
    # Burglary/earthquake alarm network with 'T'/'F'-keyed Bernoulli nodes.
    Net = BayesNet()
    B = BernoulliNode("Burglary")
    E = BernoulliNode("Earthquake")
    A = BernoulliNode("Alarm", 'Burglary Earthquake')
    J = BernoulliNode("JohnCalls", 'Alarm')
    M = BernoulliNode("MaryCalls", 'Alarm')
    # CPT rows: first index is the node's own value, the rest follow the
    # parent order declared above.
    B['T'] = 0.001
    E['T'] = 0.002
    A['T', 'T', 'T'] = 0.95
    A['T', 'T', 'F'] = 0.94
    A['T', 'F', 'T'] = 0.29
    A['T', 'F', 'F'] = 0.001
    J['T', 'T'] = 0.90
    J['T', 'F'] = 0.05
    M['T', 'T'] = 0.70
    M['T', 'F'] = 0.01
    Net.addNode(B)
    Net.addNode(E)
    Net.addNode(A)
    # (chunk truncated here)
'''
Created on May 28, 2013

@author: walter
'''
'''
Model by Justin Page
'''
from BayesNet import *

if __name__ == '__main__':
    # Seven-node Bernoulli test network.
    Net = BayesNet()
    A = BernoulliNode("A")
    B = BernoulliNode("B")
    C = BernoulliNode("C", 'A')
    D = BernoulliNode("D", 'A B')
    E = BernoulliNode("E")
    F = BernoulliNode("F", 'C D')
    G = BernoulliNode("G", 'B D E')
    # CPT rows: first index is the node's own value, remaining indices
    # follow the parent order in the declarations above.
    A['T'] = 0.51
    B['T'] = 0.31
    C['T','T'] = 0.81
    C['T','F'] = 0.31
    D['T','T','T'] = 0.99
    D['T','T','F'] = 0.73
    D['T','F','T'] = 0.62
    D['T','F','F'] = 0.17
    # (chunk truncated here)
# NOTE(review): this chunk begins mid-script -- the parsing below appears to
# run inside a loop over input lines ('line'); the loop header and the
# initialisation of idx/currentScores/maxScores/exploredNum are outside
# this chunk.
elements = line.split(',')
idx.append(int(elements[0]))
currentScores.append(float(elements[1]))
maxScores.append(float(elements[2]))
exploredNum.append(int(elements[3]))
# Work on a sorted copy so the original ordering is preserved.
sortedCurrentScores = copy.deepcopy(currentScores)
sortedCurrentScores.sort()
print "current scores len: " + str(len(currentScores))
print "max scores len: " + str(len(maxScores))
dataLen = len(currentScores)
# Normal hyper-priors over the alpha/beta parameters to be fit.
Net = BayesNet()
hyperAlphaVar = NormalNode(var="hyperAlphaVar", mean=5.0, variance=5.0)
hyperBetaVar = NormalNode(var="hyperBetaVar", mean=5.0, variance=5.0)
obsVars = []
Net.addNode(hyperAlphaVar)
Net.addNode(hyperBetaVar)
dataObs = dict([])
# Starting values, presumably for an MCMC chain -- TODO confirm; their use
# lies past this truncation point.
initAlphaMean = 3.0
initAlphaVar = 2.0
initBetaMean = 3.0
initBetaVar = 2.0
initial = dict([])
if __name__ == '__main__':
    # Read daily closing prices from payx.csv (columns: Date, Open, High,
    # Low, Close, Volume). Same model as the 20-step variant elsewhere in
    # this project, but over a 5-step window.
    closePrice = []
    reader = csv.reader(open("payx.csv"))
    '''
    for Date,Open,High,Low,Close,Volume in reader:
        print Date,Open,High,Low,Close,Volume
    '''
    for Date, Open, High, Low, Close, Volume in reader:
        closePrice.append(Close)
    print len(closePrice)
    #print closePrice
    # Priors for a random-walk price model: drift mu, volatility sigma,
    # and observation noise.
    Net = BayesNet()
    mu = InvGammaNode(var="mu", alpha=3.0, beta=0.5)
    sigma = GammaNode(var="sigma", alpha=2.0, beta=2.0)
    obsVar = GammaNode(var="obsVar", alpha=1.0, beta=2.0)

    def meanFunc1(lstS, mu):
        # Next-step mean grows the last price by factor (1 + mu).
        # NOTE(review): debug prints left in by the author.
        print mu
        print lstS
        return (1 + mu) * lstS

    def varFunc1(lstS, sigma):
        # Next-step variance scales with the last price.
        return sigma * lstS

    length = 5
    S = []
    # (chunk truncated here)
'''
Created on 2013-6-8

@author: Walter
'''
from BayesNet import *
from BayesNode import *

if __name__ == '__main__':
    # Fully hierarchical faculty model: hyper-priors on both the mean's
    # parameters (normal/gamma) and the variance's parameters (gamma).
    facultyData = [float(line) for line in open('faculty.dat')]
    Net = BayesNet()
    HMM = NormalNode("HyperMeanOfMean", mean = 5, variance=1.0/9.0)
    HMV = GammaNode("HyperVarianceOfMean", alpha = 10, beta = 5)
    HVA = GammaNode("HyperAlphaOfVariance", alpha = 10, beta = 5)
    HVB = GammaNode("HyperBetaOfVariance", alpha = 10, beta =5 )
    Mean = NormalNode("Mean", mean="HyperMeanOfMean",
                      variance="HyperVarianceOfMean")
    Variance = InvGammaNode("Variance", alpha="HyperAlphaOfVariance",
                            beta="HyperBetaOfVariance")
    faculty = []
    facultyObserve = {}
    Net.addNode(HMM)
    Net.addNode(HMV)
    Net.addNode(HVA)
    Net.addNode(HVB)
    Net.addNode(Mean)
    Net.addNode(Variance)
    # (chunk truncated here)
from BayesNet import *
from Dataset import *
from Input import *
from Learning import *

# Repeatedly sample datasets from the Asia network and accumulate the DAG
# adjacency matrices recovered by K2, to measure how often each edge is
# re-learned across trials.
dataset_dim = 150
nodes = asia_net()
net = BayesNet(nodes)
# Initial draw, kept to match the original script's sampling sequence.
dataset = Dataset(net, dataset_dim)
node_count = len(nodes)
sum_dag = np.zeros((node_count, node_count))
for _ in range(1000):
    dataset = Dataset(net, dataset_dim)
    learned_dag = k2(dataset.dataset, dataset.ordered_array, 2)
    sum_dag = sum_dag + learned_dag
print(sum_dag)
'''
Created on 2013-6-14

@author: Walter
'''
from BayesNet import *
from BayesNode import *

if __name__ == '__main__':
    # Burglary/earthquake alarm network with boolean-keyed discrete CPTs.
    Net = BayesNet()
    B = DiscreteVarNode("Burglary")
    E = DiscreteVarNode("Earthquake")
    A = DiscreteVarNode("Alarm", 'Burglary Earthquake')
    J = DiscreteVarNode("JohnCalls", 'Alarm')
    M = DiscreteVarNode("MaryCalls", 'Alarm')
    # CPT rows: first index is the node's own value, the rest follow the
    # parent order declared above.
    B[True] = 0.001
    E[True] = 0.002
    A[True, True, True] = 0.95
    A[True, True, False] = 0.94
    A[True, False, True] = 0.29
    A[True, False, False] = 0.001
    J[True, True] = 0.90
    J[True, False] = 0.05
    M[True, True] = 0.70
    M[True, False] = 0.01
    Net.addNode(B)
    Net.addNode(E)
    # (chunk truncated here)