def test_empirical_distrib_continuous(self):
    """Check that sampling over a continuous uniform node reproduces the
    analytical CDF and probability density within sampling tolerance."""
    continuous = ContinuousDistribution("var1",
                                        UniformDensityFunction(-1.0, 3.0))
    bn = BNetwork()
    var1 = ChanceNode("var1", continuous)
    bn.add_node(var1)

    sampling = SamplingAlgorithm(2000, 200)
    distrib2 = sampling.query_prob(bn, "var1")

    # the discretised posterior should have roughly one value per bucket
    assert len(distrib2.get_posterior(Assignment()).get_values()) == \
        pytest.approx(Settings.discretization_buckets, abs=2)

    # CDF checks: ~0 below the support, ~0.5 at the midpoint, ~1 above it.
    assert distrib2.to_continuous().get_cumulative_prob(-1.1) == \
        pytest.approx(0, abs=0.001)
    assert distrib2.to_continuous().get_cumulative_prob(1.0) == \
        pytest.approx(0.5, abs=0.06)
    # BUGFIX: tolerance was abs=0.00 (i.e. exact float equality); use the
    # same small non-zero tolerance as the other out-of-support CDF check.
    assert distrib2.to_continuous().get_cumulative_prob(3.1) == \
        pytest.approx(1.0, abs=0.001)

    # density checks both inside and outside the [-1, 3] support
    for point in (-2.0, -0.5, 1.8, 3.2):
        assert continuous.get_prob_density(point) == pytest.approx(
            distrib2.to_continuous().get_prob_density(point), abs=0.1)
def reduce(self, query):
    """
    Reduces the Bayesian network to a subset of its variables. This
    reduction operates here by generating the possible conditional
    assignments for every retained variable, and calculating the
    distribution for each assignment.

    :param query: the reduction query
    :return: the reduced network
    """
    network = query.get_network()
    query_vars = set(query.get_query_vars())
    evidence = query.get_evidence()

    # retain only the queried variables, processed in reverse topological
    # order so that ancestors exist in the reduced network before their
    # dependents are added
    sorted_node_ids = [node_id
                       for node_id in network.get_sorted_node_ids()
                       if node_id in query_vars]
    sorted_node_ids.reverse()

    reduced_network = BNetwork()
    for variable_id in sorted_node_ids:
        direct_ancestors = network.get_node(variable_id).get_ancestor_ids(
            query_vars)

        # enumerate all value combinations over the retained ancestors.
        # BUGFIX: the possible values must come from each ancestor node,
        # not from the variable being reduced.
        input_values = dict()
        for direct_ancestor in direct_ancestors:
            input_values[direct_ancestor] = network.get_node(
                direct_ancestor).get_values()
        assignments = InferenceUtils.get_all_combinations(input_values)

        # compute the conditional distribution for each ancestor assignment
        builder = ConditionalTableBuilder(variable_id)
        for assignment in assignments:
            new_evidence = Assignment([evidence, assignment])
            result = self.query_prob(network, variable_id, new_evidence)
            builder.add_rows(assignment, result.get_table())

        chance_node = ChanceNode(variable_id, builder.build())
        for ancestor in direct_ancestors:
            chance_node.add_input_node(reduced_network.get_node(ancestor))
        reduced_network.add_node(chance_node)

    return reduced_network
def get_bayesian_network(main_node):
    """
    Returns the initial state or parameters from the XML document, for the
    given domain (where the variable types are already declared).

    :param main_node: the main node for the XML document
    :return: the corresponding dialogue state
    :raises ValueError: if an unexpected tag is encountered
    """
    state = BNetwork()
    for child_node in main_node:
        tag = child_node.tag
        if tag == 'variable':
            state.add_node(XMLStateReader.create_chance_node(child_node))
        elif tag not in ('#text', '#comment'):
            raise ValueError()
    return state
def __init__(self):
    """ Creates a new domain with an empty dialogue state and list of models. """
    self._settings = Settings()
    # the models attached to the domain
    self._models = []
    # the initial dialogue state
    self._initial_state = DialogueState()
    # the network of parameter variables
    self._parameters = BNetwork()
    # path to the source XML file (and its imports)
    self._imported_files = []
    self._xml_file = None
def test_dep_empirical_distrib_continuous(self):
    """Check sampled CDF values for a continuous variable whose
    distribution depends on a discrete parent."""
    bn = BNetwork()

    # discrete parent: P(var1=one)=0.7, P(var1=two)=0.3
    prior_builder = CategoricalTableBuilder("var1")
    prior_builder.add_row(ValueFactory.create("one"), 0.7)
    prior_builder.add_row(ValueFactory.create("two"), 0.3)
    var1 = ChanceNode("var1", prior_builder.build())
    bn.add_node(var1)

    # continuous child: uniform under "one", Gaussian under "two"
    uniform = ContinuousDistribution("var2",
                                     UniformDensityFunction(-1.0, 3.0))
    gaussian = ContinuousDistribution("var2",
                                      GaussianDensityFunction(3.0, 10.0))
    table = ConditionalTable("var2")
    table.add_distrib(Assignment("var1", "one"), uniform)
    table.add_distrib(Assignment("var1", "two"), gaussian)
    var2 = ChanceNode("var2", table)
    var2.add_input_node(var1)
    bn.add_node(var2)

    inference = InferenceChecks()
    inference.check_cdf(bn, "var2", -1.5, 0.021)
    inference.check_cdf(bn, "var2", 0., 0.22)
    inference.check_cdf(bn, "var2", 2., 0.632)
    inference.check_cdf(bn, "var2", 8., 0.98)
def test_empirical_distrib(self):
    """Check sampled discrete posteriors against hand-computed values."""
    prior = CategoricalTableBuilder("var1")
    prior.add_row("val1", 0.6)
    prior.add_row("val2", 0.4)

    conditional = ConditionalTableBuilder("var2")
    conditional.add_row(Assignment("var1", "val1"), "val1", 0.9)
    conditional.add_row(Assignment("var1", "val1"), "val2", 0.1)
    conditional.add_row(Assignment("var1", "val2"), "val1", 0.2)
    conditional.add_row(Assignment("var1", "val2"), "val2", 0.8)

    bn = BNetwork()
    var1 = ChanceNode("var1", prior.build())
    bn.add_node(var1)
    var2 = ChanceNode("var2", conditional.build())
    var2.add_input_node(var1)
    bn.add_node(var2)

    sampling = SamplingAlgorithm(2000, 500)

    # conditional query: P(var2 | var1=val1)
    distrib = sampling.query_prob(bn, "var2", Assignment("var1", "val1"))
    assert distrib.get_prob("val1") == pytest.approx(0.9, abs=0.05)
    assert distrib.get_prob("val2") == pytest.approx(0.1, abs=0.05)

    # marginal query: P(var2=val1) = 0.6*0.9 + 0.4*0.2 = 0.62
    distrib2 = sampling.query_prob(bn, "var2")
    assert distrib2.get_prob("val1") == pytest.approx(0.62, abs=0.05)
    assert distrib2.get_prob("val2") == pytest.approx(0.38, abs=0.05)
def reduce(self, query):
    """
    Reduces the Bayesian network by retaining only a subset of variables
    and marginalising out the rest.

    :param query: the query containing the network to reduce, the
                  variables to retain, and possible evidence
    :return: the reduced network holding the probability distributions
             for the retained variables
    """
    network = query.get_network()
    query_vars = query.get_query_vars()
    query_factor = self._create_query_factor(query)

    # retained variables, visited in reverse topological order so that
    # ancestors are inserted before their dependents
    retained = [node_id for node_id in network.get_sorted_node_ids()
                if node_id in query_vars]

    reduced_network = BNetwork()
    for variable in reversed(retained):
        direct_ancestors = network.get_node(variable).get_ancestor_ids(
            query_vars)
        # extract the factor over this variable and its retained ancestors
        factor = self._get_relevant_factor(query_factor, variable,
                                           direct_ancestors)
        distrib = self._create_prob_distribution(variable, factor)
        chance_node = ChanceNode(variable, distrib)
        for ancestor in direct_ancestors:
            chance_node.add_input_node(reduced_network.get_node(ancestor))
        reduced_network.add_node(chance_node)
    return reduced_network
def test_sorted_nodes(self):
    """Check the topological sort order for the basic example network and
    for a network of action nodes sharing a common prefix."""
    bn = NetworkExamples.construct_basic_network()
    # expected ids from sorted index 0 upwards
    expected = ["MaryCalls", "JohnCalls", "Util2", "Util1",
                "Alarm", "Earthquake", "Burglary", "Action"]
    for index, node_id in enumerate(expected):
        assert node_id == bn.get_sorted_nodes()[index].get_id()

    bn2 = BNetwork()
    for action_id in ("a_m'", "a_m.obj'", "a_m.place'"):
        bn2.add_node(ActionNode(action_id))
    assert "a_m'" == bn2.get_sorted_nodes()[2].get_id()
    assert "a_m.obj'" == bn2.get_sorted_nodes()[1].get_id()
    assert "a_m.place'" == bn2.get_sorted_nodes()[0].get_id()
def extract_bayesian_network_from_string(full_string):
    """
    Extracts the Bayesian network from an XML string.

    :param full_string: the string containing the initial state content
    :return: the corresponding Bayesian network (empty if no 'state'
             element is found)
    """
    document = XMLUtils.get_xml_document(io.StringIO(full_string))
    main_node = XMLUtils.get_main_node(document)

    # look for a 'state' element: first the root itself, then its children
    for node in [main_node] + list(main_node):
        if node.tag == 'state':
            return XMLStateReader.get_bayesian_network(node)
    return BNetwork()
def prune(state):
    """
    Prunes the state of all the non-necessary nodes: selects the subset of
    relevant nodes to keep, prunes the irrelevant ones, removes the primes
    from the variable labels, and deletes all empty nodes.

    :param state: the state to prune
    """
    # step 1: selection of nodes to keep
    nodes_to_keep = StatePruner.get_nodes_to_keep(state)
    if not nodes_to_keep:
        state.reset(BNetwork())
        return
    # step 2: reduction to the retained nodes
    reduced = StatePruner.reduce(state, nodes_to_keep)
    # step 3: reinsert action and utility nodes (if necessary)
    StatePruner.reinsert_action_and_utility_nodes(reduced, state)
    # step 4: remove the primes from the identifiers
    StatePruner.remove_primes(reduced)
    # step 5: filter the distributions and remove empty nodes
    StatePruner.remove_spurious_nodes(reduced)
    # step 6: finally, reset the state to the reduced form
    state.reset(reduced)
def reduce(self, query):
    """
    Reduces the Bayesian network to a subset of its variables and returns
    the result, estimated via likelihood-weighted sampling.

    NB: the equivalent "reduce" method includes additional speed-up
    methods to simplify the reduction process.

    :param query: the reduction query
    :return: the reduced Bayesian network
    """
    network = query.get_network()
    query_vars = query.get_query_vars()

    # draw weighted samples covering the whole query
    sampler = LikelihoodWeighting(query, self._nr_samples,
                                  self._max_sampling_time)
    full_distrib = EmpiricalDistribution(sampler.get_samples())

    reduced_network = BNetwork()
    for variable in query.get_sorted_query_vars():
        input_node_ids = network.get_node(variable).get_ancestor_ids(
            query_vars)
        # drop ancestors whose already-reduced distribution is continuous
        # (iterate over a copy since the set is mutated)
        for input_node_id in tuple(input_node_ids):
            ancestor = reduced_network.get_chance_node(input_node_id)
            if isinstance(ancestor.get_distrib(), ContinuousDistribution):
                input_node_ids.remove(input_node_id)

        distrib = full_distrib.get_marginal(variable, input_node_ids)
        node = ChanceNode(variable, distrib)
        for input_node_id in input_node_ids:
            node.add_input_node(reduced_network.get_node(input_node_id))
        reduced_network.add_node(node)
    return reduced_network
def __init__(self, arg1=None, arg2=None):
    """
    Creates a new dialogue state. Four call forms are supported:

    - no arguments: an empty dialogue state;
    - a BNetwork: a state containing the given Bayesian network;
    - a Collection of nodes and an Assignment: a state containing the
      given nodes and evidence;
    - a BNetwork and an Assignment: a state containing the given network
      and additional evidence.

    :param arg1: either None, a BNetwork, or a Collection of nodes
    :param arg2: either None or an Assignment of evidence values
    :raises NotImplementedError: for any other argument combination
    """
    # NOTE: branch order matters — the Collection check must precede the
    # (BNetwork, Assignment) check, exactly as in the original dispatch.
    if arg1 is None and arg2 is None:
        super().__init__()
        super().reset(BNetwork())
        self._evidence = Assignment()
    elif isinstance(arg1, BNetwork) and arg2 is None:
        super().__init__()
        super().reset(arg1)
        self._evidence = Assignment()
    elif isinstance(arg1, Collection) and isinstance(arg2, Assignment):
        super().__init__(arg1)
        self._evidence = Assignment(arg2)
    elif isinstance(arg1, BNetwork) and isinstance(arg2, Assignment):
        super().__init__()
        super().reset(arg1)
        self._evidence = Assignment(arg2)
    else:
        raise NotImplementedError("UNDEFINED PARAMETERS")

    # subset of variables that denote parameters
    self._parameter_vars = set()
    # subset of variables that are currently incrementally constructed
    self._incremental_vars = set()
    self._init_lock()
def test_dirichlet(self):
    """Check sampling, discretisation and moments of a Dirichlet(40, 80)
    distribution."""
    old_discretisation_settings = Settings.discretization_buckets
    Settings.discretization_buckets = 250
    # restore the global setting even when an assertion fails, so the
    # modified bucket count does not leak into other tests
    try:
        alphas = np.array([40.0, 80.0])
        dirichlet = DirichletDensityFunction(alphas)
        distrib = ContinuousDistribution("x", dirichlet)

        assert isinstance(distrib.sample(), ArrayVal)
        assert 2 == len(distrib.sample())
        assert distrib.sample().get_array()[0] == pytest.approx(0.33,
                                                                abs=0.15)

        ##############################################
        # NOTE: the Java dirichlet distribution code has a bug.
        ##############################################
        # assert distrib.get_prob_density(ArrayVal([1./3, 2./3])) == pytest.approx(8.0, abs=0.5)

        n = ChanceNode("x", distrib)
        network = BNetwork()
        network.add_node(n)

        # P(x[0] < 1/3) should be close to 0.5 (mean of the first component)
        # BUGFIX: renamed local 'sum' -> 'total' to stop shadowing the builtin
        table = VariableElimination().query_prob(network, "x")
        total = 0.
        for value in table.get_values():
            if value.get_array()[0] < 0.33333:
                total += table.get_prob(value)
        assert total == pytest.approx(0.5, abs=0.1)

        conversion1 = VariableElimination().query_prob(network, "x")
        assert abs(len(conversion1.get_posterior(Assignment()).get_values())
                   - Settings.discretization_buckets) < 10
        assert conversion1.get_posterior(Assignment()).get_prob(
            ValueFactory.create("[0.3333,0.6666]")) == pytest.approx(
                0.02, abs=0.05)

        conversion3 = SamplingAlgorithm(4000, 1000).query_prob(network, "x")
        # DistributionViewer(conversion3)
        # Thread.sleep(3000000)
        # TODO: verify whether the test case below is actually correct.
        # assert conversion3.to_continuous().get_prob_density(ValueFactory.create("[0.3333,0.6666]")) == pytest.approx(9.0, abs=1.5)

        # analytical moments: mean 40/120 = 1/3, variance ~0.002
        assert distrib.get_function().get_mean()[0] == pytest.approx(
            0.333333, abs=0.01)
        assert distrib.get_function().get_variance()[0] == pytest.approx(
            0.002, abs=0.01)
        assert conversion3.to_continuous().get_function().get_mean(
        )[0] == pytest.approx(0.333333, abs=0.05)
        assert conversion3.to_continuous().get_function().get_variance(
        )[0] == pytest.approx(0.002, abs=0.05)
    finally:
        Settings.discretization_buckets = old_discretisation_settings
def construct_iwsds_network():
    """Constructs the example network with user intention i_u, user action
    a_u, observation o, machine action a_m and utility r.

    :return: the constructed Bayesian network
    """
    network = BNetwork()

    # prior over the user intention i_u
    builder = CategoricalTableBuilder("i_u")
    builder.add_row(ValueFactory.create("ki"), 0.4)
    builder.add_row(ValueFactory.create("of"), 0.3)
    builder.add_row(ValueFactory.create("co"), 0.3)
    i_u = ChanceNode("i_u", builder.build())
    network.add_node(i_u)

    # user action a_u given the intention: matches the intention with
    # probability 0.9, "null" otherwise
    builder = ConditionalTableBuilder("a_u")
    for intent in ("ki", "of", "co"):
        builder.add_row(Assignment("i_u", intent),
                        ValueFactory.create(intent), 0.9)
        builder.add_row(Assignment("i_u", intent),
                        ValueFactory.create("null"), 0.1)
    a_u = ChanceNode("a_u", builder.build())
    a_u.add_input_node(i_u)
    network.add_node(a_u)

    # observation o given the user action.
    # BUGFIX: the builder must be headed by "o" (the variable of the node
    # it builds), not "a_u" — copy-paste error from the block above.
    builder = ConditionalTableBuilder("o")
    for action, p_true, p_false in (("ki", 0.0, 1.0),
                                    ("of", 0.6, 0.4),
                                    ("co", 0.15, 0.85),
                                    ("null", 0.25, 0.75)):
        builder.add_row(Assignment("a_u", action),
                        ValueFactory.create("True"), p_true)
        builder.add_row(Assignment("a_u", action),
                        ValueFactory.create("False"), p_false)
    o = ChanceNode("o", builder.build())
    o.add_input_node(a_u)
    network.add_node(o)

    # machine action node with its possible decisions
    a_m = ActionNode("a_m")
    for decision in ("ki", "of", "co", "rep"):
        a_m.add_value(ValueFactory.create(decision))
    network.add_node(a_m)

    # utility: +3 when the machine action matches the intention, -5 when
    # it mismatches, and -0.5 for the repeat action
    r = UtilityNode("r")
    r.add_input_node(a_m)
    r.add_input_node(i_u)
    for machine_action in ("ki", "of", "co"):
        for intent in ("ki", "of", "co"):
            utility = 3 if machine_action == intent else -5
            r.add_utility(
                Assignment(Assignment("a_m", machine_action),
                           Assignment("i_u", intent)), utility)
    for intent in ("ki", "of", "co"):
        r.add_utility(
            Assignment(Assignment("a_m", "rep"),
                       Assignment("i_u", intent)), -0.5)
    network.add_node(r)

    return network
def construct_basic_network():
    """Constructs the classic burglary/earthquake/alarm network, extended
    with an action node and two utility nodes.

    :return: the constructed Bayesian network
    """
    network = BNetwork()

    # prior over Burglary
    builder = CategoricalTableBuilder('Burglary')
    builder.add_row(ValueFactory.create(True), 0.001)
    builder.add_row(ValueFactory.create(False), 0.999)
    b = ChanceNode("Burglary", builder.build())
    network.add_node(b)

    # prior over Earthquake
    builder = CategoricalTableBuilder('Earthquake')
    builder.add_row(ValueFactory.create(True), 0.002)
    builder.add_row(ValueFactory.create(False), 0.998)
    e = ChanceNode("Earthquake", builder.build())
    network.add_node(e)

    # Alarm conditioned on Burglary and Earthquake
    builder = ConditionalTableBuilder('Alarm')
    alarm_rows = ((["Burglary", "Earthquake"], 0.95, 0.05),
                  (["Burglary", "!Earthquake"], 0.95, 0.05),
                  (["!Burglary", "Earthquake"], 0.29, 0.71),
                  (["!Burglary", "!Earthquake"], 0.001, 0.999))
    for condition, p_true, p_false in alarm_rows:
        builder.add_row(Assignment(condition),
                        ValueFactory.create(True), p_true)
        builder.add_row(Assignment(condition),
                        ValueFactory.create(False), p_false)
    a = ChanceNode("Alarm", builder.build())
    a.add_input_node(b)
    a.add_input_node(e)
    network.add_node(a)

    # MaryCalls conditioned on Alarm
    builder = ConditionalTableBuilder("MaryCalls")
    builder.add_row(Assignment("Alarm"), ValueFactory.create(True), 0.7)
    builder.add_row(Assignment("Alarm"), ValueFactory.create(False), 0.3)
    builder.add_row(Assignment("!Alarm"), ValueFactory.create(True), 0.01)
    builder.add_row(Assignment("!Alarm"), ValueFactory.create(False), 0.99)
    mc = ChanceNode("MaryCalls", builder.build())
    mc.add_input_node(a)
    network.add_node(mc)

    # JohnCalls conditioned on Alarm
    builder = ConditionalTableBuilder("JohnCalls")
    builder.add_row(Assignment(["Alarm"]), ValueFactory.create(True), 0.9)
    builder.add_row(Assignment(["Alarm"]), ValueFactory.create(False), 0.1)
    builder.add_row(Assignment(["!Alarm"]), ValueFactory.create(True), 0.05)
    builder.add_row(Assignment(["!Alarm"]), ValueFactory.create(False), 0.95)
    jc = ChanceNode("JohnCalls", builder.build())
    jc.add_input_node(a)
    network.add_node(jc)

    # decision node with its two possible actions
    action = ActionNode("Action")
    action.add_value(ValueFactory.create("CallPolice"))
    action.add_value(ValueFactory.create("DoNothing"))
    network.add_node(action)

    # first utility node: cost of calling the police
    value = UtilityNode("Util1")
    value.add_input_node(b)
    value.add_input_node(action)
    util1_rows = ((True, "CallPolice", -0.5),
                  (False, "CallPolice", -1.0),
                  (True, "DoNothing", 0.0),
                  (False, "DoNothing", 0.0))
    for burglary, act, utility in util1_rows:
        value.add_utility(
            Assignment(Assignment("Burglary", burglary), "Action",
                       ValueFactory.create(act)), utility)
    network.add_node(value)

    # second utility node: cost of ignoring an actual burglary
    value2 = UtilityNode("Util2")
    value2.add_input_node(b)
    value2.add_input_node(action)
    util2_rows = ((True, "CallPolice", 0.0),
                  (False, "CallPolice", 0.0),
                  (True, "DoNothing", -10.0),
                  (False, "DoNothing", 0.5))
    for burglary, act, utility in util2_rows:
        value2.add_utility(
            Assignment(Assignment("Burglary", burglary), "Action",
                       ValueFactory.create(act)), utility)
    network.add_node(value2)

    return network