def get_model(self):
    """
    Build and return the model described by the parsed UAI file.

    Variables follow the pattern var_0, var_1, var_2, ... where var_i
    is the variable at the i-th index.

    Return
    ------
    model: an instance of Bayesian or Markov Model.

    Examples
    --------
    >>> reader = UAIReader('TestUAI.uai')
    >>> reader.get_model()
    """
    if self.network_type == 'BAYES':
        model = BayesianModel()
        # Register every variable first so isolated nodes survive,
        # then lay the directed edges on top.
        model.add_nodes_from(self.variables)
        model.add_edges_from(self.edges)

        cpds = []
        for table in self.tables:
            child = table[0]
            card = int(self.domain[child])
            flat = np.array([float(entry) for entry in table[1]])
            # Rows = states of the child; columns = parent configurations.
            shaped = flat.reshape(card, flat.size // card)
            cpds.append(TabularCPD(child, card, shaped))
        model.add_cpds(*cpds)
        return model

    elif self.network_type == 'MARKOV':
        model = MarkovModel(self.edges)
        factors = [
            DiscreteFactor(
                variables=table[0],
                cardinality=[int(self.domain[var]) for var in table[0]],
                values=[float(entry) for entry in table[1]],
            )
            for table in self.tables
        ]
        model.add_factors(*factors)
        return model
def get_model(self):
    """
    Build and return the model described by the parsed UAI file.

    Variables follow the pattern var_0, var_1, var_2, ... where var_i
    is the variable at the i-th index.

    Return
    ------
    model: an instance of Bayesian or Markov Model.

    Examples
    --------
    >>> reader = UAIReader('TestUAI.uai')
    >>> reader.get_model()
    """
    if self.network_type == 'BAYES':
        model = BayesianModel(self.edges)
        # Building from edges alone silently drops variables with no
        # neighbours; register every declared variable explicitly so the
        # node set matches self.variables (consistent with the other
        # get_model variant in this file).
        model.add_nodes_from(self.variables)
        tabular_cpds = []
        for cpd in self.tables:
            child_var = cpd[0]
            states = int(self.domain[child_var])
            arr = list(map(float, cpd[1]))
            values = np.array(arr)
            # Rows = states of the child; columns = parent configurations.
            values = values.reshape(states, values.size // states)
            tabular_cpds.append(TabularCPD(child_var, states, values))
        model.add_cpds(*tabular_cpds)
        return model
    elif self.network_type == 'MARKOV':
        model = MarkovModel(self.edges)
        factors = []
        for table in self.tables:
            variables = table[0]
            cardinality = [int(self.domain[var]) for var in variables]
            value = list(map(float, table[1]))
            factor = DiscreteFactor(variables=variables,
                                    cardinality=cardinality,
                                    values=value)
            factors.append(factor)
        model.add_factors(*factors)
        return model
# Represent Markov Network Model
from pgmpy.models import MarkovModel

model = MarkovModel([('A', 'B'), ('B', 'C')])
model.add_node('D')
model.add_edges_from([('C', 'D'), ('D', 'A')])

# --- Define factors to associate with model
from pgmpy.factors import Factor

# Fixed from the original notes: the keyword argument is ``cardinality``
# (was misspelled ``cardibality``, which would raise TypeError), and the
# first two entries of each value list were separated by '.' instead of
# ',' (e.g. ``[90. 100, ...]`` is a syntax error).
factor_a_b = Factor(variables=['A', 'B'], cardinality=[2, 2], values=[90, 100, 1, 10])
factor_b_c = Factor(variables=['B', 'C'], cardinality=[2, 2], values=[10, 80, 70, 20])
factor_c_d = Factor(variables=['C', 'D'], cardinality=[2, 2], values=[50, 120, 10, 10])
factor_d_a = Factor(variables=['D', 'A'], cardinality=[2, 2], values=[20, 80, 50, 40])

# --- Add factors to model
model.add_factors(factor_a_b, factor_b_c, factor_c_d, factor_d_a)
model.get_factors()
# Expected REPL output (memory addresses will differ; the original notes
# pasted these as stray string literals — kept here as comments):
# [<Factor representing phi(A:2, B:2) at 0x...>,
#  <Factor representing phi(B:2, C:2) at 0x...>,
#  <Factor representing phi(C:2, D:2) at 0x...>,
#  <Factor representing phi(D:2, A:2) at 0x...>]

# Cluster Graph -- "Cluster Graph"
########################################################
#
#  ________     _______     ________
# |__f1__|---|__A__|---|__f3__|
# NOTE(review): truncated fragment — the next line closes a DiscreteFactor(...)
# call (presumably factor4 = DiscreteFactor([..., ...], cardinality=[3, 3],)
# whose opening lies above this chunk. Do not reformat without the full file.
                          values=cpd2)
# Each factorN ties an observed "*_received" node to its hidden counterpart
# via the same 3x3 table cpd2 — presumably a shared observation-noise model;
# TODO confirm cpd2's definition above this chunk.
factor5 = DiscreteFactor(['safety', 'safety_received'], cardinality=[3, 3], values=cpd2)
factor6 = DiscreteFactor(['lug_boot', 'lug_boot_received'], cardinality=[3, 3], values=cpd2)
# for any given x (where x can also be continuous) this reduces to a table with k scaled probabilities
#
# print(factor3)
# factor3.reduce([('doors_received', 1)])
#
# factor.normalize()
# print(factor3)
# add the factor to the network
#
model.add_factors(factor1)
model.add_factors(factor2)
model.add_factors(factor3)
model.add_factors(factor4)
model.add_factors(factor5)
model.add_factors(factor6)
# print(model.nodes())
# print(model.edges())
# can also use VE
#
inference = BeliefPropagation(model)
print("Class variable prior:\n{}\n".format(
    inference.query(variables=['class'])['class']))
# NOTE(review): the chunk is cut off mid-call below — the closing of this
# format(...) / print(...) lies beyond the visible source.
print("Class variable posterior after noisy observation:\n{}\n".format(
# Render the previously built graph g1 (defined above this chunk) via pydot.
d1 = to_pydot(g1)
d1.set_dpi(300)
d1.set_margin(0.5)
Image(d1.create_png(), width=300)

from pgmpy.models import MarkovModel
from pgmpy.factors.discrete import DiscreteFactor

# Define the Markov random field (Markov model) as a 4-cycle A-B-C-D-A.
model = MarkovModel([('A', 'B'), ('B', 'C'), ('C','D'), ('D','A')])
# One pairwise factor per edge; all variables are binary with states {0, 1}.
factor1 = DiscreteFactor(['A', 'B'], [2,2], [30, 5, 1, 10], state_names={'A': [0,1], 'B':[0,1]})
factor2 = DiscreteFactor(['B', 'C'], [2,2], [100, 1, 1, 100], state_names={'B': [0,1], 'C':[0,1]})
factor3 = DiscreteFactor(['C', 'D'], [2,2], [1, 100, 100, 1], state_names={'C': [0,1], 'D':[0,1]})
factor4 = DiscreteFactor(['D', 'A'], [2,2], [100, 1, 1, 100], state_names={'D': [0,1], 'A':[0,1]})
model.add_factors(factor1, factor2, factor3, factor4)
# (Korean label: "model validity")
print('모델의 타당성: ', model.check_model())

import numpy as np

# Partition function Z — normalizer of the unnormalized product of factors.
# (Korean label: "value of the partition function")
pf_value = model.get_partition_function()
print('\n분할 함수의 값: ', pf_value)

infer = VariableElimination(model)  # create the inference object
phi_ABCD = infer.query(['A', 'B', 'C', 'D'])  # full joint factor phi(A,B,C,D)
print('phi(A,B,C,D)')
print(phi_ABCD)

P_ABCD = phi_ABCD.values/pf_value  # probability = (product of factors) / (partition function)
PABCD = np.reshape(P_ABCD, -1)
for val in PABCD:  # print each joint probability
    print(val, '\n')