def GetLocalConstraintsForRoot(self, file_prefix):
    """Compile the 'then' constraint for the root cluster and save it to disk.

    Builds, over one fresh SDD manager, the disjunction of two cases:
      case one: no child-cluster indicator is set and the internal edges
                form a simple path between some pair of child clusters;
      case two: exactly one child-cluster indicator is set and no internal
                edge is used (the path lies entirely inside that child).

    Writes the compiled SDD and its vtree under ``file_prefix`` and returns
    a constraint dict with the filenames and the variable mapping.

    :param file_prefix: directory/prefix for the emitted .sdd/.vtree files
    :return: dict with keys "then_vtree", "then", "then_variable_mapping"
    """
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename = "%s/%s_then_sdd.sdd" % (file_prefix, self.name)
    constraint = {}
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = [then_sdd_filename]
    universe = []
    # internal edges
    for sub_region_edge_tup in self.sub_region_edges:
        universe.append(sub_region_edge_tup)
    GraphSet.set_universe(universe)
    # Re-read the universe so iteration order below matches Graphillion's
    # internal (possibly reordered) edge order.
    universe = GraphSet.universe()
    paths = GraphSet()
    child_names = self.children.keys()
    # Union of simple paths between every unordered pair of child clusters.
    for (i, j) in itertools.combinations(child_names, 2):
        paths = paths.union(GraphSet.paths(i, j))
    name_to_sdd_index = {}
    zdd_to_sdd_index = [None]  # for generating sdd from graphset
    sdd_index = 0
    # SDD variables 1..len(children) are the child-cluster indicators.
    for child in child_names:
        sdd_index += 1
        name_to_sdd_index["c%s" % child] = sdd_index
    # Remaining SDD variables: one per network edge, grouped per ZDD
    # (sub-region) edge; zdd_to_sdd_index[k] lists the SDD vars for the
    # k-th universe edge (1-based, hence the leading None).
    for sub_region_edge in universe:
        corresponding_network_edges = self.sub_region_edges[sub_region_edge]
        coresponding_network_edges_sdd_index = []
        for single_edge in corresponding_network_edges:
            sdd_index += 1
            name_to_sdd_index[str(single_edge)] = sdd_index
            coresponding_network_edges_sdd_index.append(sdd_index)
        zdd_to_sdd_index.append(coresponding_network_edges_sdd_index)
    constraint["then_variable_mapping"] = name_to_sdd_index
    rl_vtree = sdd.sdd_vtree_new(sdd_index, "right")
    sdd_manager = sdd.sdd_manager_new(rl_vtree)
    sdd.sdd_vtree_free(rl_vtree)  # manager keeps its own vtree copy
    sdd.sdd_manager_auto_gc_and_minimize_off(sdd_manager)
    # Construct simple path constraint
    simple_path_constraint = generate_sdd_from_graphset(
        paths, sdd_manager, zdd_to_sdd_index)
    # non empty path in this region map: all child indicators off,
    # internal edges form a simple path between two children.
    none_of_child = sdd.util.sdd_negative_term(
        sdd_manager,
        [name_to_sdd_index["c%s" % child] for child in self.children])
    case_one = sdd.sdd_conjoin(none_of_child, simple_path_constraint,
                               sdd_manager)
    # empty path in this region map: exactly one child indicator on,
    # no internal edge used.
    exactly_one_child = sdd.util.sdd_exactly_one(
        sdd_manager,
        [name_to_sdd_index["c%s" % child] for child in self.children])
    empty_path_constraint = sdd.util.sdd_negative_term(
        sdd_manager,
        sum(zdd_to_sdd_index[1:], []))  # flatten all edge variable indexes
    case_two = sdd.sdd_conjoin(exactly_one_child, empty_path_constraint,
                               sdd_manager)
    total_constraint = sdd.sdd_disjoin(case_one, case_two, sdd_manager)
    sdd.sdd_save(then_sdd_filename, total_constraint)
    sdd.sdd_vtree_save(then_vtree_filename, sdd.sdd_manager_vtree(sdd_manager))
    sdd.sdd_manager_free(sdd_manager)
    return constraint
def start_manager(var_count, order):
    """Create an SDD manager over a fresh right-linear vtree.

    NOTE: ``order`` is accepted but not used by the current construction;
    the variable-order-aware variant is kept below as a reference.
    """
    # Alternatives previously considered:
    #   vtree = sdd.sdd_vtree_new_with_var_order(var_count, order, "right")
    #   vtree = vtrees.right_linear_vtree(1, var_count + 1)
    seed_vtree = sdd.sdd_vtree_new(var_count, "right")
    manager = sdd.sdd_manager_new(seed_vtree)
    sdd.sdd_manager_auto_gc_and_minimize_off(manager)
    # The manager holds its own vtree, so the seed can be released.
    sdd.sdd_vtree_free(seed_vtree)
    return manager
def primes_given_term(alpha, inst, mgr, primes_f):
    """Compute primes of ``alpha`` compatible with the term ``inst``.

    Allocates a fresh manager over three times the variable count of
    ``mgr`` (right-linear vtree), runs ``primes_f``, and strips the dummy
    variables from the result.

    :return: (kappa, primes_mgr, primes_vtree)
    """
    base_count = sdd.sdd_manager_var_count(mgr)
    p_vtree = sdd.sdd_vtree_new(3 * base_count, "right")
    p_mgr = sdd.sdd_manager_new(p_vtree)
    all_vars = range(1, base_count + 1)
    memo_one, memo_two = {}, {}
    kappa = primes_f(alpha, all_vars, inst, memo_one, memo_two, p_mgr, mgr)
    kappa = _remove_dummies(kappa, base_count, p_mgr)
    return kappa, p_mgr, p_vtree
def primes(alpha, mgr, primes_f=_primes_two):
    """Compile the prime implicants of ``alpha`` into a fresh manager.

    Allocates a manager over three times the variable count of ``mgr``
    (balanced vtree), runs ``primes_f``, and strips the dummy variables.

    :return: (kappa, primes_mgr)
    """
    base_count = sdd.sdd_manager_var_count(mgr)
    p_vtree = sdd.sdd_vtree_new(3 * base_count, "balanced")
    p_mgr = sdd.sdd_manager_new(p_vtree)
    all_vars = range(1, base_count + 1)
    memo_one, memo_two = {}, {}
    kappa = primes_f(alpha, all_vars, memo_one, memo_two, p_mgr, mgr)
    kappa = _remove_dummies(kappa, base_count, p_mgr)
    return kappa, p_mgr
def PI(sdd_filename, vtree_filename, num_features, model_list): vtree = sdd.sdd_vtree_new(num_features, "right") mgr = sdd.sdd_manager_new(vtree) vtree = sdd.sdd_manager_vtree(mgr) alpha = sdd.sdd_read(sdd_filename, mgr) sdd.sdd_vtree_save(vtree_filename, vtree) print "-----Begin PI query-----" explqs.run_prime_implicant_query(alpha, mgr, num_features, model_list) print "-----End PI query-----\n"
def test_admission():
    """Smoke test: primes/compatible_primes on a 4-variable admission example.

    Variables are W,F,E,G (1..4). alpha is the negation of
    (w^g) v (w^f^e) v (f^e^g); prints its models, the models of its prime
    implicants, and per-model compatible primes rendered as +/-/* terms.
    """
    var_count = 4
    vtree = sdd.sdd_vtree_new(var_count, "balanced")
    mgr = sdd.sdd_manager_new(vtree)
    # WFEG
    # ( w ^ g )
    alpha = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                            sdd.sdd_manager_literal(4, mgr), mgr)
    # ( w ^ f ^ e )
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                           sdd.sdd_manager_literal(2, mgr), mgr)
    beta = sdd.sdd_conjoin(beta, sdd.sdd_manager_literal(3, mgr), mgr)
    # ( f ^ e ^ g )
    gamma = sdd.sdd_conjoin(sdd.sdd_manager_literal(2, mgr),
                            sdd.sdd_manager_literal(3, mgr), mgr)
    gamma = sdd.sdd_conjoin(gamma, sdd.sdd_manager_literal(4, mgr), mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, gamma, mgr)
    alpha = sdd.sdd_negate(alpha, mgr)
    # beta now holds the prime implicants of alpha, in its own manager.
    beta, pmgr = primes(alpha, mgr)
    _sanity_check(alpha, mgr, beta, pmgr)
    vtree = sdd.sdd_manager_vtree(mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    import models
    for model in models.models(alpha, vtree):
        print models.str_model(model)
    for model in models.models(beta, pvtree):
        print models.str_model(model)
    for model in models.models(alpha, vtree):
        print "==", models.str_model(model)
        # Model as a 0/1 list ordered by variable index.
        model_list = [model[var] for var in sorted(model.keys())]
        gamma, pmgr = compatible_primes(alpha, model_list, mgr,
                                        primes_mgr=(beta, pmgr))
        pvtree = sdd.sdd_manager_vtree(pmgr)
        for prime_model in models.models(gamma, pvtree):
            print models.str_model(prime_model)
            term = prime_to_dict(prime_model, var_count)
            # Render the prime as one symbol per variable:
            # '*' = variable absent, '+' = positive, '-' = negative.
            print " ".join([
                ("*" if var not in term else "+" if term[var] == 1 else "-")
                for var in xrange(1, var_count + 1)
            ])
    print "dead-nodes:", sdd.sdd_manager_dead_count(mgr)
    print "dead-nodes:", sdd.sdd_manager_dead_count(pmgr)
def run():
    """Driver: load an SDD + vtree from module-level filenames and run PI.

    Reads ``vtree_filename``, ``sdd_filename`` and
    ``variable_description_filename`` from module scope (not visible here).
    """
    vtree = sdd.sdd_vtree_read(vtree_filename)
    mgr = sdd.sdd_manager_new(vtree)
    # Switch to the manager's own vtree copy.
    vtree = sdd.sdd_manager_vtree(mgr)
    alpha = sdd.sdd_read(sdd_filename, mgr)
    with open(variable_description_filename) as f:
        variable_description = f.readlines()
    # First line is expected to look like "<label> <feature-count> ...".
    num_features = int(variable_description[0].strip().split(" ")[1])
    # can specify custom instances by doing
    # model_list = [[0,0,0,0],[0,0,0,1],[0,0,1,0],...]
    # enumerate a few positive instances from alpha
    model_list = get_model_list(alpha, vtree, 10)
    # NOTE(review): this passes (alpha, mgr, ...) while another PI in this
    # codebase takes (sdd_filename, vtree_filename, ...) — presumably a
    # different overload/module; confirm which PI is in scope here.
    PI(alpha, mgr, num_features, model_list)
def test():
    """Smoke test: primes/compatible_primes on A v B v (~C ^ ~D).

    Prints the models of the prime-implicant SDD, then the compatible
    primes for three fixed instances, then dead-node counts.
    """
    var_count = 4
    vtree = sdd.sdd_vtree_new(var_count, "balanced")
    mgr = sdd.sdd_manager_new(vtree)
    # A v B
    alpha = sdd.sdd_disjoin(sdd.sdd_manager_literal(1, mgr),
                            sdd.sdd_manager_literal(2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-3, mgr),
                           sdd.sdd_manager_literal(-4, mgr), mgr)
    # A v B v ( ~C ^ ~D )
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    beta, pmgr = primes(alpha, mgr)
    _sanity_check(alpha, mgr, beta, pmgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    import models
    #beta2 = sdd.sdd_global_minimize_cardinality(beta,pmgr)
    beta2 = beta
    for model in models.models(beta2, pvtree):
        print models.str_model(model)
    # cache_hits is a module-level counter updated by the primes machinery.
    global cache_hits
    print "cache-hits:", cache_hits
    print "all-ones"
    beta, pmgr = compatible_primes(alpha, [1, 1, 1, 1], mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for model in models.models(beta, pvtree):
        print models.str_model(model)
    print "all-zeros"
    beta, pmgr = compatible_primes(alpha, [0, 0, 0, 0], mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for model in models.models(beta, pvtree):
        print models.str_model(model)
    print "blah"
    beta, pmgr = compatible_primes(alpha, [1, 0, 1, 0], mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for model in models.models(beta, pvtree):
        print models.str_model(model)
    print "dead-nodes:", sdd.sdd_manager_dead_count(mgr)
    print "dead-nodes:", sdd.sdd_manager_dead_count(pmgr)
def test_andy(): var_count = 3 vtree = sdd.sdd_vtree_new(var_count, "balanced") mgr = sdd.sdd_manager_new(vtree) # 100, 101, 111, 001, 011 alpha = sdd.sdd_manager_false(mgr) beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr), sdd.sdd_manager_literal(-2, mgr), mgr) beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-3, mgr), beta, mgr) alpha = sdd.sdd_disjoin(alpha, beta, mgr) beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr), sdd.sdd_manager_literal(-2, mgr), mgr) beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr) alpha = sdd.sdd_disjoin(alpha, beta, mgr) beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr), sdd.sdd_manager_literal(2, mgr), mgr) beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr) alpha = sdd.sdd_disjoin(alpha, beta, mgr) beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-1, mgr), sdd.sdd_manager_literal(-2, mgr), mgr) beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr) alpha = sdd.sdd_disjoin(alpha, beta, mgr) beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-1, mgr), sdd.sdd_manager_literal(2, mgr), mgr) beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr) alpha = sdd.sdd_disjoin(alpha, beta, mgr) beta, pmgr = primes(alpha, mgr) _sanity_check(alpha, mgr, beta, pmgr) vtree = sdd.sdd_manager_vtree(mgr) pvtree = sdd.sdd_manager_vtree(pmgr) import models for model in models.models(alpha, vtree): print models.str_model(model) for model in models.models(beta, pvtree): print models.str_model(model) print "dead-nodes:", sdd.sdd_manager_dead_count(mgr) print "dead-nodes:", sdd.sdd_manager_dead_count(pmgr)
def convert_obdd_to_sdd(output_filename, documentation_filename):
    """Parse an OBDD node dump and convert it into an SDD.

    ``output_filename`` lists one node per line after a header line;
    ``documentation_filename``'s first line carries the variable count in
    its second whitespace-separated field.

    :return: (converted sdd node, vtree, manager)
    """
    with open(output_filename, 'r') as f:
        raw_rows = f.readlines()[1:]  # skip the header line
    with open(documentation_filename, 'r') as f:
        num_variables = int(f.readline().split(' ')[1])
    # Tokenize each row, turning purely-numeric tokens into ints
    # (non-numeric tokens, e.g. terminal labels, stay as strings).
    parsed_rows = [
        [int(tok) if tok.isdigit() else tok for tok in row.strip().split(' ')]
        for row in raw_rows
    ]
    # First field is the node id; the rest describes the node.
    node_dict = {row[0]: row[1:] for row in parsed_rows}
    #print node_dict
    vtree = sdd.sdd_vtree_new(num_variables, "right")
    mgr = sdd.sdd_manager_new(vtree)
    vtree = sdd.sdd_manager_vtree(mgr)
    root = 0
    return convert_helper(root, mgr, node_dict, {}, 0), vtree, mgr
def __setstate__(self, state):
    """Restore pickled state by round-tripping SDDs through a temp file.

    The sdd library reads nodes/vtrees only from files, so each serialized
    string in ``state`` is written to one shared temp file and re-read.
    Expects keys: 'varcount', 'vtree', 'nodes' (list of str-or-None),
    'constraint_dd'.
    """
    self.nodes = []
    self.varcount = state['varcount']
    tempfile = mktempfile()
    with open(tempfile, 'w') as f:
        f.write(state['vtree'])
    vtree = sdd.sdd_vtree_read(tempfile)
    self.__manager = sdd.sdd_manager_new(vtree)
    for n in state['nodes']:
        if n is None:
            # Placeholder entries are preserved as-is.
            self.nodes.append(None)
        else:
            with open(tempfile, 'w') as f:
                f.write(n)
            self.nodes.append(sdd.sdd_read(tempfile, self.__manager))
    with open(tempfile, 'w') as f:
        f.write(state['constraint_dd'])
    self.constraint_dd = sdd.sdd_read(tempfile, self.__manager)
    # NOTE(review): if any sdd_read above raises, the temp file leaks —
    # consider a try/finally.
    os.remove(tempfile)
    return
def GetLocalConstraintsForInternalClusters(self, file_prefix):
    """Compile the paired if/then constraints for an internal cluster.

    Emits one (if, then) SDD pair per case, all saved under ``file_prefix``:
      0. no external edge used, cluster indicator off -> everything off;
      1. no external edge used, indicator on -> path lies inside this
         cluster (simple path between two children, or entirely within
         exactly one child);
      2+ exactly one external edge used (grouped by entering child) ->
         path from that child to some other child, or ends there;
      then exactly two external edges used (grouped by entering-child
         pair) -> path connecting the two entry children.

    Returns a constraint dict with filenames and variable mappings.
    """
    if_vtree_filename = "%s/%s_if_vtree.vtree" % (file_prefix, self.name)
    if_sdd_filename_prefix = "%s/%s_if_sdd" % (file_prefix, self.name)
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename_prefix = "%s/%s_then_sdd" % (file_prefix, self.name)
    ifs = []
    thens = []
    # "if" variables: the cluster indicator followed by external edges.
    if_variable_mapping = {}
    if_sdd_index = 0
    if_sdd_index += 1
    if_variable_mapping[
        "c%s" % self.name] = if_sdd_index  # cluster indicator for current cluster
    for external_edge in self.external_edges:
        if_sdd_index += 1
        if_variable_mapping[str(external_edge)] = if_sdd_index
    then_variable_mapping = {}
    # variables for the child clusters
    then_sdd_index = 0
    zdd_to_sdd_index = [None]
    for child in self.children:
        then_sdd_index += 1
        then_variable_mapping["c%s" % child] = then_sdd_index
    universe = self.sub_region_edges.keys()
    GraphSet.set_universe(universe)
    # Re-read so iteration order matches Graphillion's internal order.
    universe = GraphSet.universe()
    # Remaining "then" variables: one per network edge, grouped per
    # universe (node-pair) edge for ZDD->SDD translation.
    for node_pair in universe:
        correponding_sdd_indexes = []
        for internal_edge in self.sub_region_edges[node_pair]:
            then_sdd_index += 1
            then_variable_mapping[str(internal_edge)] = then_sdd_index
            correponding_sdd_indexes.append(then_sdd_index)
        zdd_to_sdd_index.append(correponding_sdd_indexes)
    if_vtree, then_vtree = sdd.sdd_vtree_new(if_sdd_index,
                                             "right"), sdd.sdd_vtree_new(
                                                 then_sdd_index, "right")
    if_manager, then_manager = sdd.sdd_manager_new(
        if_vtree), sdd.sdd_manager_new(then_vtree)
    sdd.sdd_manager_auto_gc_and_minimize_off(if_manager)
    sdd.sdd_manager_auto_gc_and_minimize_off(then_manager)
    # Managers keep their own vtree copies; release the seeds.
    sdd.sdd_vtree_free(if_vtree)
    sdd.sdd_vtree_free(then_vtree)
    #none of the external edges are used and cluster indicator is off
    case_index = 0
    case_one_if = sdd.util.sdd_negative_term(if_manager,
                                             range(1, if_sdd_index + 1))
    case_one_then = sdd.util.sdd_negative_term(then_manager,
                                               range(1, then_sdd_index + 1))
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_one_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                 case_one_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    #none of the external edges are used and cluster indicator is on
    case_index += 1
    case_two_if = sdd.util.sdd_exactly_one_among(
        if_manager, [if_variable_mapping["c%s" % self.name]],
        range(1, if_sdd_index + 1))
    #***Non empty path in this region map
    none_of_child = sdd.util.sdd_negative_term(
        then_manager,
        [then_variable_mapping["c%s" % child] for child in self.children])
    paths = GraphSet()
    child_names = self.children.keys()
    for c1, c2 in itertools.combinations(child_names, 2):
        paths = paths.union(GraphSet.paths(c1, c2))
    simple_path_constraint = generate_sdd_from_graphset(
        paths, then_manager, zdd_to_sdd_index)
    case_one = sdd.sdd_conjoin(simple_path_constraint, none_of_child,
                               then_manager)
    #***Empty path in the region map
    exactly_one_chlid = sdd.util.sdd_exactly_one(
        then_manager,
        [then_variable_mapping["c%s" % child] for child in self.children])
    empty_path_constraint = sdd.util.sdd_negative_term(
        then_manager, sum(zdd_to_sdd_index[1:], []))
    case_two = sdd.sdd_conjoin(empty_path_constraint, exactly_one_chlid,
                               then_manager)
    case_two_then = sdd.sdd_disjoin(case_one, case_two, then_manager)
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_two_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                 case_two_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    #Exactly one of the external edge is used and cluster_indicator is off
    # Group external edges by the child cluster they enter through.
    aggregated_cases = {}
    for external_edge in self.external_edges:
        aggregated_cases.setdefault(self.external_edges[external_edge],
                                    []).append(external_edge)
    for entering_node in aggregated_cases:
        case_index += 1
        cur_case_if = sdd.util.sdd_exactly_one_among(
            if_manager, [
                if_variable_mapping[str(e)]
                for e in aggregated_cases[entering_node]
            ], range(1, if_sdd_index + 1))
        paths = GraphSet()
        for child in self.children:
            if child == entering_node:
                continue
            paths = paths.union(GraphSet.paths(entering_node, child))
        cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                   zdd_to_sdd_index)
        # Also allow the path to terminate at the entering child itself
        # (no internal edges used).
        cur_case_then = sdd.sdd_disjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(then_manager, [
                then_variable_mapping[str(e)] for e in self.internal_edges
            ]), then_manager)
        #conjoin that all the child indicator is off
        cur_case_then = sdd.sdd_conjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(then_manager, [
                then_variable_mapping["c%s" % child]
                for child in self.children
            ]), then_manager)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index),
                     cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                     cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    #Exactly two of the external edge is used and cluster_indicator is off
    # Group external-edge pairs by the (ordered) pair of entry children.
    aggregated_cases = {}
    for (i, j) in itertools.combinations(self.external_edges.keys(), 2):
        entering_points = (self.external_edges[i], self.external_edges[j])
        entering_points = (max(entering_points), min(entering_points))
        aggregated_cases.setdefault(entering_points, []).append((i, j))
    for entering_points in aggregated_cases:
        case_index += 1
        entering_edges = aggregated_cases[entering_points]
        cur_case_if = generate_exactly_two_from_tuples(
            if_manager,
            [(if_variable_mapping[str(e1)], if_variable_mapping[str(e2)])
             for (e1, e2) in entering_edges], range(1, if_sdd_index + 1))
        if entering_points[0] == entering_points[1]:
            # Both external edges enter through the same child: the path
            # passes through without using any internal edge.
            cur_case_then = sdd.util.sdd_negative_term(
                then_manager, range(1, then_sdd_index + 1))
        else:
            paths = GraphSet.paths(entering_points[0], entering_points[1])
            cur_case_then = generate_sdd_from_graphset(
                paths, then_manager, zdd_to_sdd_index)
            cur_case_then = sdd.sdd_conjoin(
                cur_case_then,
                sdd.util.sdd_negative_term(then_manager, [
                    then_variable_mapping["c%s" % child]
                    for child in self.children
                ]), then_manager)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index),
                     cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                     cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    sdd.sdd_vtree_save(if_vtree_filename, sdd.sdd_manager_vtree(if_manager))
    sdd.sdd_vtree_save(then_vtree_filename,
                       sdd.sdd_manager_vtree(then_manager))
    sdd.sdd_manager_free(if_manager)
    sdd.sdd_manager_free(then_manager)
    constraint = {}
    constraint["if_vtree"] = if_vtree_filename
    constraint["if"] = ifs
    constraint["if_variable_mapping"] = if_variable_mapping
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = thens
    constraint["then_variable_mapping"] = then_variable_mapping
    return constraint
def GetLocalConstraintsForLeaveClusters(self, file_prefix):
    """Compile the paired if/then constraints for a leaf cluster.

    Mirrors the internal-cluster version, but paths are drawn between the
    leaf's own graph nodes (``self.nodes``) rather than child clusters:
      0. no external edge used, indicator off -> everything off;
      1. no external edge used, indicator on -> a simple path between
         some pair of nodes inside the leaf;
      2+ exactly one external edge used (grouped by entering node) ->
         a path starting at that node, or the empty path;
      then exactly two external edges used (grouped by node pair) ->
         a path connecting the two entry nodes.

    Returns a constraint dict with filenames and variable mappings.
    """
    if_vtree_filename = "%s/%s_if_vtree.vtree" % (file_prefix, self.name)
    if_sdd_filename_prefix = "%s/%s_if_sdd" % (file_prefix, self.name)
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename_prefix = "%s/%s_then_sdd" % (file_prefix, self.name)
    ifs = []
    thens = []
    # "if" variables: the cluster indicator followed by external edges.
    if_variable_mapping = {}
    if_sdd_index = 0
    if_sdd_index += 1
    if_variable_mapping[
        "c%s" % self.name] = if_sdd_index  # cluster indicator for current cluster
    for external_edge in self.external_edges:
        if_sdd_index += 1
        if_variable_mapping[str(external_edge)] = if_sdd_index
    then_variable_mapping = {}
    zdd_to_sdd_index = [None]
    # Build the ZDD universe of (x, y) node pairs, remembering which
    # parallel internal edges map onto each pair.
    universe = []
    node_pair_to_edges = {}
    for internal_edge in self.internal_edges:
        if (internal_edge.x, internal_edge.y) not in node_pair_to_edges:
            universe.append((internal_edge.x, internal_edge.y))
        node_pair_to_edges.setdefault((internal_edge.x, internal_edge.y),
                                      []).append(internal_edge)
    GraphSet.set_universe(universe)
    # Re-read so iteration order matches Graphillion's internal order.
    universe = GraphSet.universe()
    then_sdd_index = 0
    for node_pair in universe:
        correponding_sdd_indexes = []
        for internal_edge in node_pair_to_edges[node_pair]:
            then_sdd_index += 1
            then_variable_mapping[str(internal_edge)] = then_sdd_index
            correponding_sdd_indexes.append(then_sdd_index)
        zdd_to_sdd_index.append(correponding_sdd_indexes)
    if_vtree, then_vtree = sdd.sdd_vtree_new(if_sdd_index,
                                             "right"), sdd.sdd_vtree_new(
                                                 then_sdd_index, "right")
    if_manager, then_manager = sdd.sdd_manager_new(
        if_vtree), sdd.sdd_manager_new(then_vtree)
    sdd.sdd_manager_auto_gc_and_minimize_off(if_manager)
    sdd.sdd_manager_auto_gc_and_minimize_off(then_manager)
    # Managers keep their own vtree copies; release the seeds.
    sdd.sdd_vtree_free(if_vtree)
    sdd.sdd_vtree_free(then_vtree)
    #none of the external edges are used and cluster indicator is off
    case_index = 0
    case_one_if = sdd.util.sdd_negative_term(if_manager,
                                             range(1, if_sdd_index + 1))
    case_one_then = sdd.util.sdd_negative_term(then_manager,
                                               range(1, then_sdd_index + 1))
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_one_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                 case_one_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    #none of the external edges are used and cluster indicator is on
    case_index += 1
    case_two_if = sdd.util.sdd_exactly_one_among(
        if_manager, [if_variable_mapping["c%s" % self.name]],
        range(1, if_sdd_index + 1))
    paths = GraphSet()
    for (i, j) in itertools.combinations(self.nodes, 2):
        paths = paths.union(GraphSet.paths(i, j))
    case_two_then = generate_sdd_from_graphset(paths, then_manager,
                                               zdd_to_sdd_index)
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_two_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                 case_two_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    #exactly one of the external edge is used and cluster indicator is off
    # Group external edges by the leaf node they enter through.
    aggregated_cases = {}
    for external_edge in self.external_edges:
        aggregated_cases.setdefault(self.external_edges[external_edge],
                                    []).append(external_edge)
    for entering_node in aggregated_cases:
        case_index += 1
        cur_case_if = sdd.util.sdd_exactly_one_among(
            if_manager, [
                if_variable_mapping[str(e)]
                for e in aggregated_cases[entering_node]
            ], range(1, if_sdd_index + 1))
        paths = GraphSet()
        for node in self.nodes:
            if node == entering_node:
                continue
            paths = paths.union(GraphSet.paths(entering_node, node))
        cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                   zdd_to_sdd_index)
        # disjoin the empty path
        cur_case_then = sdd.sdd_disjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(then_manager,
                                       range(1, then_sdd_index + 1)),
            then_manager)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index),
                     cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                     cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    # exactly two of the external edge is used and cluster_indicator is off
    # Group external-edge pairs by the pair of entry nodes.
    aggregated_cases = {}
    for (i, j) in itertools.combinations(self.external_edges.keys(), 2):
        entering_points = (self.external_edges[i], self.external_edges[j])
        entering_points = (max(entering_points), min(entering_points))
        aggregated_cases.setdefault(entering_points, []).append((i, j))
    for entering_points in aggregated_cases:
        case_index += 1
        entering_edges = aggregated_cases[entering_points]
        cur_case_if = generate_exactly_two_from_tuples(
            if_manager,
            [(if_variable_mapping[str(e1)], if_variable_mapping[str(e2)])
             for (e1, e2) in entering_edges], range(1, if_sdd_index + 1))
        if entering_points[0] == entering_points[1]:
            # Both external edges enter at the same node: pass-through,
            # no internal edge used.
            cur_case_then = sdd.util.sdd_negative_term(
                then_manager, range(1, then_sdd_index + 1))
        else:
            paths = GraphSet.paths(entering_points[0], entering_points[1])
            cur_case_then = generate_sdd_from_graphset(
                paths, then_manager, zdd_to_sdd_index)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index),
                     cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                     cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    sdd.sdd_vtree_save(if_vtree_filename, sdd.sdd_manager_vtree(if_manager))
    sdd.sdd_vtree_save(then_vtree_filename,
                       sdd.sdd_manager_vtree(then_manager))
    sdd.sdd_manager_free(if_manager)
    sdd.sdd_manager_free(then_manager)
    constraint = {}
    constraint["if_vtree"] = if_vtree_filename
    constraint["if"] = ifs
    constraint["if_variable_mapping"] = if_variable_mapping
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = thens
    constraint["then_variable_mapping"] = then_variable_mapping
    return constraint
for x in it: y = it.next() yield (x, y) def str_model(model, var_count=None): """Convert model to string.""" if var_count is None: var_count = len(model) return " ".join(str(model[var]) for var in xrange(1, var_count + 1)) if __name__ == '__main__': var_count = 10 vtree = sdd.sdd_vtree_new(var_count, "balanced") manager = sdd.sdd_manager_new(vtree) alpha = sdd.sdd_manager_false(manager) for var in xrange(1, var_count + 1): lit = sdd.sdd_manager_literal(-var, manager) alpha = sdd.sdd_disjoin(alpha, lit, manager) vt = sdd.sdd_manager_vtree(manager) model_count = 0 for model in models(alpha, vt): model_count += 1 print str_model(model, var_count=var_count) #lib_mc = sdd.sdd_model_count(alpha,manager) print "model count: %d" % model_count
def start_manager(graph):
    """Create an SDD manager with one variable per edge of ``graph``,
    over a right-linear vtree.
    """
    edge_count = len(graph.edges)
    seed_vtree = sdd.sdd_vtree_new(edge_count, "right")
    manager = sdd.sdd_manager_new(seed_vtree)
    # The manager holds its own vtree, so the seed can be released.
    sdd.sdd_vtree_free(seed_vtree)
    return manager
def run():
    """Compile a random forest's trees into one SDD, evaluate it, and save it.

    Pipeline: read dataset/header, create a manager, compile logical
    constraints and each tree, sort tree SDDs by size, recompile, combine
    everything with ``compile_all``, evaluate on the dataset, check
    monotonicity, garbage collect, and write the SDD/vtree files.
    Relies on module-level names (test_filename, constraint_filename,
    tree_basename, num_trees, sdd_filename, vtree_filename, ...).
    """
    with timer.Timer("reading dataset"):
        dataset = util.read_binary_dataset(test_filename)
        domain = util.read_header(test_filename)
        '''
        if OPTIONS.majority_circuit_opt:
            l = len(domain)
            for k in xrange(num_trees):
                domain["Tree_%d" % k] = l+k
        '''
    with timer.Timer("initializing manager"):
        # start sdd manager
        var_count = len(domain) - 1  # last header field is not a feature
        vtree = sdd.sdd_vtree_new(var_count, "balanced")
        manager = sdd.sdd_manager_new(vtree)
        #sdd.sdd_manager_auto_gc_and_minimize_on(manager)
        #sdd.sdd_manager_auto_gc_and_minimize_off(manager)
        sdd_state = SddState(vtree, manager)
    with timer.Timer("reading constraints"):
        constraint_sdd, constraint_info = encode_logical_constraints(
            constraint_filename, manager, domain)
        # Keep the constraint alive across garbage collections below.
        sdd.sdd_ref(constraint_sdd, manager)
    with timer.Timer("reading trees"):
        tree_states = []
        for filename in sorted(glob.glob(tree_basename.replace('%d', '*'))):
            tree = pygv.AGraph(filename)
            tree_state = TreeState(tree, domain, constraint_info)
            tree_states.append(tree_state)
            #tree.layout(prog='dot')
            #tree.draw(filename+".png")
        #num_trees = len(tree_states)
    with timer.Timer("compiling trees"):
        forest_sdds, _ = izip(*forest_sdds_iter(tree_states, sdd_state))
        #forest_sdds = list(forest_sdds_iter(tree_states,sdd_state))
        forest_sdds = [
            (tree_state, tree_sdd)
            for tree_state, tree_sdd in zip(tree_states, forest_sdds)
        ]
        # Sort trees by compiled SDD size (smallest first) before the
        # second compilation pass.
        cmpf = lambda x, y: cmp(sdd.sdd_size(x[1]), sdd.sdd_size(y[1]))
        forest_sdds.sort(cmp=cmpf)
        tree_states = [tree_state for tree_state, tree_sdd in forest_sdds]
        #ACACAC
        sdd.sdd_manager_auto_gc_and_minimize_off(manager)
        sdd.sdd_manager_minimize_limited(manager)
        stats = SddSizeStats()
        for tree_state, tree_sdd in forest_sdds:
            stats.update(tree_sdd)
            sdd.sdd_deref(tree_sdd, manager)
        sdd.sdd_manager_garbage_collect(manager)
        # Recompile in sorted order; keep the used-variable lists this time.
        forest_sdds, used_vars_list = izip(
            *forest_sdds_iter(tree_states, sdd_state))
    print stats
    with timer.Timer("compiling all", prefix="| "):
        alpha = compile_all(forest_sdds, used_vars_list, num_trees, domain,
                            manager, constraint_sdd)
    with timer.Timer("evaluating"):
        msg = util.evaluate_dataset_all_sdd(dataset, alpha, manager)
    print "| trees : %d" % num_trees
    print "--- evaluating majority vote on random forest (compiled):"
    print msg
    print "| all size :", sdd.sdd_size(alpha)
    print "| all count:", sdd.sdd_count(alpha)
    print " model count:", sdd.sdd_global_model_count(alpha, manager)
    with timer.Timer("checking monotonicity"):
        result = is_monotone(alpha, manager)
        print "Is monotone?", result
    #for tree_sdd in forest_sdds: sdd.sdd_deref(tree_sdd,manager)
    print "===================="
    print "before garbage collecting..."
    print "live size:", sdd.sdd_manager_live_count(manager)
    print "dead size:", sdd.sdd_manager_dead_count(manager)
    print "garbage collecting..."
    sdd.sdd_manager_garbage_collect(manager)
    print "live size:", sdd.sdd_manager_live_count(manager)
    print "dead size:", sdd.sdd_manager_dead_count(manager)
    vtree = sdd.sdd_manager_vtree(manager)
    print "Writing sdd file %s and vtree file %s" % (sdd_filename,
                                                     vtree_filename)
    sdd.sdd_save(sdd_filename, alpha)
    sdd.sdd_vtree_save(vtree_filename, vtree)
    print "Writing constraint sdd file %s and constraint vtree file %s" % (
        constraint_sdd_filename, constraint_vtree_filename)
    sdd.sdd_save(constraint_sdd_filename, constraint_sdd)
    sdd.sdd_vtree_save(constraint_vtree_filename, vtree)
for x in it: y = it.next() yield (x, y) def str_model(model, var_count=None): """Convert model to string.""" if var_count is None: var_count = len(model) return " ".join(str(model[var]) for var in xrange(1, var_count + 1)) if __name__ == "__main__": var_count = 10 vtree = sdd.sdd_vtree_new(var_count, "balanced") manager = sdd.sdd_manager_new(vtree) alpha = sdd.sdd_manager_false(manager) for var in xrange(1, var_count + 1): lit = sdd.sdd_manager_literal(-var, manager) alpha = sdd.sdd_disjoin(alpha, lit, manager) vt = sdd.sdd_manager_vtree(manager) model_count = 0 for model in models(alpha, vt): model_count += 1 print str_model(model, var_count=var_count) # lib_mc = sdd.sdd_model_count(alpha,manager) print "model count: %d" % model_count