def __getstate__(self):
    """Serialize this object for pickling.

    The vtree, every node, and the constraint SDD are written to a
    temporary file one at a time and read back as text, since the sdd
    library only exposes file-based serialization.

    Returns a dict with keys 'varcount', 'nodes' (list of textual SDD
    dumps, None entries preserved), 'vtree', and 'constraint_dd'.
    """
    tempfile = mktempfile()

    def _dump(save_fn, obj):
        # Save `obj` to the temp file via `save_fn` and return the text.
        save_fn(tempfile, obj)
        with open(tempfile) as f:
            return f.read()

    try:
        vtree_data = _dump(sdd.sdd_vtree_save,
                           sdd.sdd_manager_vtree(self.get_manager()))
        nodes = []
        for n in self.nodes:
            if n is None:
                nodes.append(None)
            else:
                nodes.append(_dump(sdd.sdd_save, n))
        constraint_dd = _dump(sdd.sdd_save, self.constraint_dd)
    finally:
        # The original leaked the temp file when a save/read raised;
        # always clean it up.
        os.remove(tempfile)
    return {
        'varcount': self.varcount,
        'nodes': nodes,
        'vtree': vtree_data,
        'constraint_dd': constraint_dd
    }
def GetLocalConstraintsForRoot(self, file_prefix):
    """Build and save the 'then' constraint (SDD + vtree) for the root
    region, returning a dict describing the generated files and the
    variable mapping."""
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename = "%s/%s_then_sdd.sdd" % (file_prefix, self.name)
    constraint = {}
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = [then_sdd_filename]
    # The ZDD universe is the set of internal (sub-region) edges.
    universe = [edge_tup for edge_tup in self.sub_region_edges]
    GraphSet.set_universe(universe)
    universe = GraphSet.universe()
    # All simple paths between every pair of child clusters.
    paths = GraphSet()
    child_names = self.children.keys()
    for (i, j) in itertools.combinations(child_names, 2):
        paths = paths.union(GraphSet.paths(i, j))
    # Assign SDD variable indexes: child indicators first, then the
    # network edges behind each sub-region edge.
    name_to_sdd_index = {}
    zdd_to_sdd_index = [None]  # zdd variables are 1-based
    sdd_index = 0
    for child in child_names:
        sdd_index += 1
        name_to_sdd_index["c%s" % child] = sdd_index
    for sub_region_edge in universe:
        edge_indexes = []
        for single_edge in self.sub_region_edges[sub_region_edge]:
            sdd_index += 1
            name_to_sdd_index[str(single_edge)] = sdd_index
            edge_indexes.append(sdd_index)
        zdd_to_sdd_index.append(edge_indexes)
    constraint["then_variable_mapping"] = name_to_sdd_index
    rl_vtree = sdd.sdd_vtree_new(sdd_index, "right")
    sdd_manager = sdd.sdd_manager_new(rl_vtree)
    sdd.sdd_vtree_free(rl_vtree)
    sdd.sdd_manager_auto_gc_and_minimize_off(sdd_manager)
    child_indexes = [name_to_sdd_index["c%s" % child]
                     for child in self.children]
    # Case one: a non-empty simple path and no child indicator on.
    simple_path_constraint = generate_sdd_from_graphset(
        paths, sdd_manager, zdd_to_sdd_index)
    none_of_child = sdd.util.sdd_negative_term(sdd_manager, child_indexes)
    case_one = sdd.sdd_conjoin(none_of_child, simple_path_constraint,
                               sdd_manager)
    # Case two: an empty path and exactly one child indicator on.
    exactly_one_child = sdd.util.sdd_exactly_one(sdd_manager, child_indexes)
    empty_path_constraint = sdd.util.sdd_negative_term(
        sdd_manager, sum(zdd_to_sdd_index[1:], []))
    case_two = sdd.sdd_conjoin(exactly_one_child, empty_path_constraint,
                               sdd_manager)
    total_constraint = sdd.sdd_disjoin(case_one, case_two, sdd_manager)
    sdd.sdd_save(then_sdd_filename, total_constraint)
    sdd.sdd_vtree_save(then_vtree_filename,
                       sdd.sdd_manager_vtree(sdd_manager))
    sdd.sdd_manager_free(sdd_manager)
    return constraint
def PI(sdd_filename, vtree_filename, num_features, model_list): vtree = sdd.sdd_vtree_new(num_features, "right") mgr = sdd.sdd_manager_new(vtree) vtree = sdd.sdd_manager_vtree(mgr) alpha = sdd.sdd_read(sdd_filename, mgr) sdd.sdd_vtree_save(vtree_filename, vtree) print "-----Begin PI query-----" explqs.run_prime_implicant_query(alpha, mgr, num_features, model_list) print "-----End PI query-----\n"
def main():
    """Convert the OBDD described by the JSON config (argv[1]) to an SDD
    and save both the SDD and its vtree next to the configured basename."""
    config_file = sys.argv[1]
    with open(config_file, 'r') as f:
        config = json.load(f)
    basename = str("../../" + config[u"output_filepath"] + config[u"name"] +
                   "_" + config[u"id"])
    odd_filename = basename + ".odd"
    txt_filename = basename + ".txt"
    alpha, vtree, mgr = convert_obdd_to_sdd(odd_filename, txt_filename)
    sdd.sdd_save(basename + ".sdd", alpha)
    sdd.sdd_vtree_save(basename + ".vtree", vtree)
def sdd_exactly_one_among(manager, active_variables, background_variables):
    """Return an SDD asserting that exactly one of `active_variables` is
    true and every other variable in `background_variables` is false.

    Raises Exception if some active variable is not listed among the
    background variables.

    Fix: the original wrote debug scratch files ("t1.sdd", "t2.sdd",
    "manager.vtree") into the current directory on every loop iteration
    — leftover instrumentation, removed here. The returned SDD is
    unchanged.
    """
    if not all(x in background_variables for x in active_variables):
        raise Exception(
            "Invalid argument active variables %s, background_variables %s "
            % (active_variables, background_variables))
    result = sdd.sdd_manager_false(manager)
    for positive_variable in active_variables:
        # Build the term: positive_variable true, all others false.
        cur_term = sdd.sdd_manager_true(manager)
        for variable in background_variables:
            if variable != positive_variable:
                cur_lit = sdd.sdd_manager_literal(-variable, manager)
            else:
                cur_lit = sdd.sdd_manager_literal(variable, manager)
            cur_term = sdd.sdd_conjoin(cur_term, cur_lit, manager)
        result = sdd.sdd_disjoin(result, cur_term, manager)
    return result
def convert(filename): start = time.time() manager,alpha = orig.parse_bdd(filename+".zdd") end = time.time() print " sdd node count: %d" % sdd.sdd_count(alpha) print " sdd size: %d" % sdd.sdd_size(alpha) print " sdd model count: %d" % sdd.sdd_model_count(alpha,manager) print " global model count: %d" % orig.global_model_count(alpha,manager) print " read bdd time: %.3fs" % (end-start) sdd.sdd_save(filename + ".sdd",alpha) #sdd.sdd_save_as_dot(filename +".sdd.dot",alpha) vtree = sdd.sdd_manager_vtree(manager) sdd.sdd_vtree_save(filename + ".vtree",vtree) #sdd.sdd_vtree_save_as_dot(filename +".vtree.dot",vtree) print "====================" print "before garbage collecting..." print "live size:", sdd.sdd_manager_live_count(manager) print "dead size:", sdd.sdd_manager_dead_count(manager) print "garbage collecting..." sdd.sdd_manager_garbage_collect(manager) print "live size:", sdd.sdd_manager_live_count(manager) print "dead size:", sdd.sdd_manager_dead_count(manager)
def GetLocalConstraintsForInternalClusters(self, file_prefix):
    """Build and save the if/then constraint SDDs and vtrees for an
    internal (non-leaf, non-root) cluster, returning a dict describing
    the generated files and variable mappings."""
    if_vtree_filename = "%s/%s_if_vtree.vtree" % (file_prefix, self.name)
    if_sdd_filename_prefix = "%s/%s_if_sdd" % (file_prefix, self.name)
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename_prefix = "%s/%s_then_sdd" % (file_prefix, self.name)
    ifs = []
    thens = []
    # "if" variables: this cluster's indicator, then its external edges.
    if_variable_mapping = {}
    if_sdd_index = 1
    if_variable_mapping["c%s" % self.name] = if_sdd_index
    for external_edge in self.external_edges:
        if_sdd_index += 1
        if_variable_mapping[str(external_edge)] = if_sdd_index
    # "then" variables: child indicators, then internal edges grouped by
    # the sub-region node pair they connect.
    then_variable_mapping = {}
    then_sdd_index = 0
    zdd_to_sdd_index = [None]  # zdd variables are 1-based
    for child in self.children:
        then_sdd_index += 1
        then_variable_mapping["c%s" % child] = then_sdd_index
    universe = self.sub_region_edges.keys()
    GraphSet.set_universe(universe)
    universe = GraphSet.universe()
    for node_pair in universe:
        pair_sdd_indexes = []
        for internal_edge in self.sub_region_edges[node_pair]:
            then_sdd_index += 1
            then_variable_mapping[str(internal_edge)] = then_sdd_index
            pair_sdd_indexes.append(then_sdd_index)
        zdd_to_sdd_index.append(pair_sdd_indexes)
    if_vtree = sdd.sdd_vtree_new(if_sdd_index, "right")
    then_vtree = sdd.sdd_vtree_new(then_sdd_index, "right")
    if_manager = sdd.sdd_manager_new(if_vtree)
    then_manager = sdd.sdd_manager_new(then_vtree)
    sdd.sdd_manager_auto_gc_and_minimize_off(if_manager)
    sdd.sdd_manager_auto_gc_and_minimize_off(then_manager)
    sdd.sdd_vtree_free(if_vtree)
    sdd.sdd_vtree_free(then_vtree)

    def _record_case(case_index, case_if, case_then):
        # Persist both sides of one case and remember the filenames.
        if_name = "%s_%s" % (if_sdd_filename_prefix, case_index)
        then_name = "%s_%s" % (then_sdd_filename_prefix, case_index)
        sdd.sdd_save(if_name, case_if)
        sdd.sdd_save(then_name, case_then)
        ifs.append(if_name)
        thens.append(then_name)

    # Case 0: no external edge used and cluster indicator off.
    case_index = 0
    _record_case(
        case_index,
        sdd.util.sdd_negative_term(if_manager, range(1, if_sdd_index + 1)),
        sdd.util.sdd_negative_term(then_manager,
                                   range(1, then_sdd_index + 1)))

    # Case 1: no external edge used and cluster indicator on.
    case_index += 1
    case_two_if = sdd.util.sdd_exactly_one_among(
        if_manager, [if_variable_mapping["c%s" % self.name]],
        range(1, if_sdd_index + 1))
    # Either a non-empty simple path with every child indicator off ...
    none_of_child = sdd.util.sdd_negative_term(
        then_manager,
        [then_variable_mapping["c%s" % child] for child in self.children])
    paths = GraphSet()
    child_names = self.children.keys()
    for c1, c2 in itertools.combinations(child_names, 2):
        paths = paths.union(GraphSet.paths(c1, c2))
    simple_path_constraint = generate_sdd_from_graphset(
        paths, then_manager, zdd_to_sdd_index)
    case_one = sdd.sdd_conjoin(simple_path_constraint, none_of_child,
                               then_manager)
    # ... or an empty path with exactly one child indicator on.
    exactly_one_child = sdd.util.sdd_exactly_one(
        then_manager,
        [then_variable_mapping["c%s" % child] for child in self.children])
    empty_path_constraint = sdd.util.sdd_negative_term(
        then_manager, sum(zdd_to_sdd_index[1:], []))
    case_two = sdd.sdd_conjoin(empty_path_constraint, exactly_one_child,
                               then_manager)
    case_two_then = sdd.sdd_disjoin(case_one, case_two, then_manager)
    _record_case(case_index, case_two_if, case_two_then)

    # Cases: exactly one external edge used and cluster indicator off,
    # grouped by the node where the edge enters this cluster.
    aggregated_cases = {}
    for external_edge in self.external_edges:
        aggregated_cases.setdefault(self.external_edges[external_edge],
                                    []).append(external_edge)
    for entering_node in aggregated_cases:
        case_index += 1
        cur_case_if = sdd.util.sdd_exactly_one_among(
            if_manager,
            [if_variable_mapping[str(e)]
             for e in aggregated_cases[entering_node]],
            range(1, if_sdd_index + 1))
        paths = GraphSet()
        for child in self.children:
            if child == entering_node:
                continue
            paths = paths.union(GraphSet.paths(entering_node, child))
        cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                   zdd_to_sdd_index)
        # Allow the path to terminate here: no internal edge used.
        cur_case_then = sdd.sdd_disjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(
                then_manager,
                [then_variable_mapping[str(e)]
                 for e in self.internal_edges]),
            then_manager)
        # Every child indicator must be off.
        cur_case_then = sdd.sdd_conjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(
                then_manager,
                [then_variable_mapping["c%s" % child]
                 for child in self.children]),
            then_manager)
        _record_case(case_index, cur_case_if, cur_case_then)

    # Cases: exactly two external edges used and cluster indicator off,
    # grouped by the (ordered) pair of entering nodes.
    aggregated_cases = {}
    for (i, j) in itertools.combinations(self.external_edges.keys(), 2):
        entering_points = (self.external_edges[i], self.external_edges[j])
        entering_points = (max(entering_points), min(entering_points))
        aggregated_cases.setdefault(entering_points, []).append((i, j))
    for entering_points in aggregated_cases:
        case_index += 1
        entering_edges = aggregated_cases[entering_points]
        cur_case_if = generate_exactly_two_from_tuples(
            if_manager,
            [(if_variable_mapping[str(e1)], if_variable_mapping[str(e2)])
             for (e1, e2) in entering_edges],
            range(1, if_sdd_index + 1))
        if entering_points[0] == entering_points[1]:
            # Both edges enter at the same node: nothing used internally.
            cur_case_then = sdd.util.sdd_negative_term(
                then_manager, range(1, then_sdd_index + 1))
        else:
            paths = GraphSet.paths(entering_points[0], entering_points[1])
            cur_case_then = generate_sdd_from_graphset(
                paths, then_manager, zdd_to_sdd_index)
            cur_case_then = sdd.sdd_conjoin(
                cur_case_then,
                sdd.util.sdd_negative_term(
                    then_manager,
                    [then_variable_mapping["c%s" % child]
                     for child in self.children]),
                then_manager)
        _record_case(case_index, cur_case_if, cur_case_then)

    sdd.sdd_vtree_save(if_vtree_filename, sdd.sdd_manager_vtree(if_manager))
    sdd.sdd_vtree_save(then_vtree_filename,
                       sdd.sdd_manager_vtree(then_manager))
    sdd.sdd_manager_free(if_manager)
    sdd.sdd_manager_free(then_manager)
    constraint = {}
    constraint["if_vtree"] = if_vtree_filename
    constraint["if"] = ifs
    constraint["if_variable_mapping"] = if_variable_mapping
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = thens
    constraint["then_variable_mapping"] = then_variable_mapping
    return constraint
def GetLocalConstraintsForLeaveClusters(self, file_prefix):
    """Build and save the if/then constraint SDDs and vtrees for a leaf
    cluster, returning a dict describing the generated files and
    variable mappings."""
    if_vtree_filename = "%s/%s_if_vtree.vtree" % (file_prefix, self.name)
    if_sdd_filename_prefix = "%s/%s_if_sdd" % (file_prefix, self.name)
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename_prefix = "%s/%s_then_sdd" % (file_prefix, self.name)
    ifs = []
    thens = []
    # "if" variables: this cluster's indicator, then its external edges.
    if_variable_mapping = {}
    if_sdd_index = 1
    if_variable_mapping["c%s" % self.name] = if_sdd_index
    for external_edge in self.external_edges:
        if_sdd_index += 1
        if_variable_mapping[str(external_edge)] = if_sdd_index
    # "then" variables: internal edges grouped by their (x, y) node pair.
    then_variable_mapping = {}
    zdd_to_sdd_index = [None]  # zdd variables are 1-based
    universe = []
    node_pair_to_edges = {}
    for internal_edge in self.internal_edges:
        node_pair = (internal_edge.x, internal_edge.y)
        if node_pair not in node_pair_to_edges:
            universe.append(node_pair)
        node_pair_to_edges.setdefault(node_pair, []).append(internal_edge)
    GraphSet.set_universe(universe)
    universe = GraphSet.universe()
    then_sdd_index = 0
    for node_pair in universe:
        pair_sdd_indexes = []
        for internal_edge in node_pair_to_edges[node_pair]:
            then_sdd_index += 1
            then_variable_mapping[str(internal_edge)] = then_sdd_index
            pair_sdd_indexes.append(then_sdd_index)
        zdd_to_sdd_index.append(pair_sdd_indexes)
    if_vtree = sdd.sdd_vtree_new(if_sdd_index, "right")
    then_vtree = sdd.sdd_vtree_new(then_sdd_index, "right")
    if_manager = sdd.sdd_manager_new(if_vtree)
    then_manager = sdd.sdd_manager_new(then_vtree)
    sdd.sdd_manager_auto_gc_and_minimize_off(if_manager)
    sdd.sdd_manager_auto_gc_and_minimize_off(then_manager)
    sdd.sdd_vtree_free(if_vtree)
    sdd.sdd_vtree_free(then_vtree)

    def _record_case(case_index, case_if, case_then):
        # Persist both sides of one case and remember the filenames.
        if_name = "%s_%s" % (if_sdd_filename_prefix, case_index)
        then_name = "%s_%s" % (then_sdd_filename_prefix, case_index)
        sdd.sdd_save(if_name, case_if)
        sdd.sdd_save(then_name, case_then)
        ifs.append(if_name)
        thens.append(then_name)

    # Case 0: no external edge used and cluster indicator off.
    case_index = 0
    _record_case(
        case_index,
        sdd.util.sdd_negative_term(if_manager, range(1, if_sdd_index + 1)),
        sdd.util.sdd_negative_term(then_manager,
                                   range(1, then_sdd_index + 1)))

    # Case 1: no external edge used and cluster indicator on -> a simple
    # path between some pair of nodes inside this cluster.
    case_index += 1
    case_two_if = sdd.util.sdd_exactly_one_among(
        if_manager, [if_variable_mapping["c%s" % self.name]],
        range(1, if_sdd_index + 1))
    paths = GraphSet()
    for (i, j) in itertools.combinations(self.nodes, 2):
        paths = paths.union(GraphSet.paths(i, j))
    case_two_then = generate_sdd_from_graphset(paths, then_manager,
                                               zdd_to_sdd_index)
    _record_case(case_index, case_two_if, case_two_then)

    # Cases: exactly one external edge used and cluster indicator off,
    # grouped by the node where the edge enters this cluster.
    aggregated_cases = {}
    for external_edge in self.external_edges:
        aggregated_cases.setdefault(self.external_edges[external_edge],
                                    []).append(external_edge)
    for entering_node in aggregated_cases:
        case_index += 1
        cur_case_if = sdd.util.sdd_exactly_one_among(
            if_manager,
            [if_variable_mapping[str(e)]
             for e in aggregated_cases[entering_node]],
            range(1, if_sdd_index + 1))
        paths = GraphSet()
        for node in self.nodes:
            if node == entering_node:
                continue
            paths = paths.union(GraphSet.paths(entering_node, node))
        cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                   zdd_to_sdd_index)
        # Allow the path to terminate at the entering node (empty path).
        cur_case_then = sdd.sdd_disjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(then_manager,
                                       range(1, then_sdd_index + 1)),
            then_manager)
        _record_case(case_index, cur_case_if, cur_case_then)

    # Cases: exactly two external edges used and cluster indicator off,
    # grouped by the (ordered) pair of entering nodes.
    aggregated_cases = {}
    for (i, j) in itertools.combinations(self.external_edges.keys(), 2):
        entering_points = (self.external_edges[i], self.external_edges[j])
        entering_points = (max(entering_points), min(entering_points))
        aggregated_cases.setdefault(entering_points, []).append((i, j))
    for entering_points in aggregated_cases:
        case_index += 1
        entering_edges = aggregated_cases[entering_points]
        cur_case_if = generate_exactly_two_from_tuples(
            if_manager,
            [(if_variable_mapping[str(e1)], if_variable_mapping[str(e2)])
             for (e1, e2) in entering_edges],
            range(1, if_sdd_index + 1))
        if entering_points[0] == entering_points[1]:
            # Both edges enter at the same node: nothing used internally.
            cur_case_then = sdd.util.sdd_negative_term(
                then_manager, range(1, then_sdd_index + 1))
        else:
            paths = GraphSet.paths(entering_points[0], entering_points[1])
            cur_case_then = generate_sdd_from_graphset(
                paths, then_manager, zdd_to_sdd_index)
        _record_case(case_index, cur_case_if, cur_case_then)

    sdd.sdd_vtree_save(if_vtree_filename, sdd.sdd_manager_vtree(if_manager))
    sdd.sdd_vtree_save(then_vtree_filename,
                       sdd.sdd_manager_vtree(then_manager))
    sdd.sdd_manager_free(if_manager)
    sdd.sdd_manager_free(then_manager)
    constraint = {}
    constraint["if_vtree"] = if_vtree_filename
    constraint["if"] = ifs
    constraint["if_variable_mapping"] = if_variable_mapping
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = thens
    constraint["then_variable_mapping"] = then_variable_mapping
    return constraint
def run(): with timer.Timer("reading dataset"): dataset = util.read_binary_dataset(test_filename) domain = util.read_header(test_filename) ''' if OPTIONS.majority_circuit_opt: l = len(domain) for k in xrange(num_trees): domain["Tree_%d" % k] = l+k ''' with timer.Timer("initializing manager"): # start sdd manager var_count = len(domain) - 1 vtree = sdd.sdd_vtree_new(var_count, "balanced") manager = sdd.sdd_manager_new(vtree) #sdd.sdd_manager_auto_gc_and_minimize_on(manager) #sdd.sdd_manager_auto_gc_and_minimize_off(manager) sdd_state = SddState(vtree, manager) with timer.Timer("reading constraints"): constraint_sdd, constraint_info = encode_logical_constraints( constraint_filename, manager, domain) sdd.sdd_ref(constraint_sdd, manager) with timer.Timer("reading trees"): tree_states = [] for filename in sorted(glob.glob(tree_basename.replace('%d', '*'))): tree = pygv.AGraph(filename) tree_state = TreeState(tree, domain, constraint_info) tree_states.append(tree_state) #tree.layout(prog='dot') #tree.draw(filename+".png") #num_trees = len(tree_states) with timer.Timer("compiling trees"): forest_sdds, _ = izip(*forest_sdds_iter(tree_states, sdd_state)) #forest_sdds = list(forest_sdds_iter(tree_states,sdd_state)) forest_sdds = [ (tree_state, tree_sdd) for tree_state, tree_sdd in zip(tree_states, forest_sdds) ] cmpf = lambda x, y: cmp(sdd.sdd_size(x[1]), sdd.sdd_size(y[1])) forest_sdds.sort(cmp=cmpf) tree_states = [tree_state for tree_state, tree_sdd in forest_sdds] #ACACAC sdd.sdd_manager_auto_gc_and_minimize_off(manager) sdd.sdd_manager_minimize_limited(manager) stats = SddSizeStats() for tree_state, tree_sdd in forest_sdds: stats.update(tree_sdd) sdd.sdd_deref(tree_sdd, manager) sdd.sdd_manager_garbage_collect(manager) forest_sdds, used_vars_list = izip( *forest_sdds_iter(tree_states, sdd_state)) print stats with timer.Timer("compiling all", prefix="| "): alpha = compile_all(forest_sdds, used_vars_list, num_trees, domain, manager, constraint_sdd) with 
timer.Timer("evaluating"): msg = util.evaluate_dataset_all_sdd(dataset, alpha, manager) print "| trees : %d" % num_trees print "--- evaluating majority vote on random forest (compiled):" print msg print "| all size :", sdd.sdd_size(alpha) print "| all count:", sdd.sdd_count(alpha) print " model count:", sdd.sdd_global_model_count(alpha, manager) with timer.Timer("checking monotonicity"): result = is_monotone(alpha, manager) print "Is monotone?", result #for tree_sdd in forest_sdds: sdd.sdd_deref(tree_sdd,manager) print "====================" print "before garbage collecting..." print "live size:", sdd.sdd_manager_live_count(manager) print "dead size:", sdd.sdd_manager_dead_count(manager) print "garbage collecting..." sdd.sdd_manager_garbage_collect(manager) print "live size:", sdd.sdd_manager_live_count(manager) print "dead size:", sdd.sdd_manager_dead_count(manager) vtree = sdd.sdd_manager_vtree(manager) print "Writing sdd file %s and vtree file %s" % (sdd_filename, vtree_filename) sdd.sdd_save(sdd_filename, alpha) sdd.sdd_vtree_save(vtree_filename, vtree) print "Writing constraint sdd file %s and constraint vtree file %s" % ( constraint_sdd_filename, constraint_vtree_filename) sdd.sdd_save(constraint_sdd_filename, constraint_sdd) sdd.sdd_vtree_save(constraint_vtree_filename, vtree)
print " sdd model count: %d" % sdd.sdd_model_count(alpha,manager) print " global model count: %d" % global_model_count(alpha,manager) print " read bdd time: %.3fs" % (end-start) """ sdd.sdd_ref(alpha,manager) start = time.time() sdd.sdd_manager_minimize(manager) end = time.time() print " min sdd node count: %d" % sdd.sdd_count(alpha) print " min sdd time: %.3fs" % (end-start) sdd.sdd_deref(alpha,manager) """ sdd.sdd_save(filename + ".sdd",alpha) #sdd.sdd_save_as_dot(filename +".sdd.dot",alpha) vtree = sdd.sdd_manager_vtree(manager) sdd.sdd_vtree_save(filename + ".vtree",vtree) #sdd.sdd_vtree_save_as_dot(filename +".vtree.dot",vtree) """ print "====================" print "before garbage collecting..." print "live size:", sdd.sdd_manager_live_count(manager) print "dead size:", sdd.sdd_manager_dead_count(manager) print "garbage collecting..." sdd.sdd_manager_garbage_collect(manager) print "live size:", sdd.sdd_manager_live_count(manager) print "dead size:", sdd.sdd_manager_dead_count(manager) """
print " read bdd time: %.3fs" % (end-start) """ sdd.sdd_ref(alpha,manager) start = time.time() sdd.sdd_manager_minimize(manager) end = time.time() print " min sdd node count: %d" % sdd.sdd_count(alpha) print " min sdd time: %.3fs" % (end-start) sdd.sdd_deref(alpha,manager) """ sdd.sdd_save(filename + ".sdd",alpha) #sdd.sdd_save_as_dot(filename +".sdd.dot",alpha) vtree = sdd.sdd_manager_vtree(manager) sdd.sdd_vtree_save(filename + ".vtree",vtree) #sdd.sdd_vtree_save_as_dot(filename +".vtree.dot",vtree) sdd.sdd_save(filenameNoMP + ".sdd",alphaNoMP) vtreeNoMP = sdd.sdd_manager_vtree(managerNoMP) sdd.sdd_vtree_save(filenameNoMP + ".vtree",vtreeNoMP) print "====================" print "before garbage collecting..." print "live size:", sdd.sdd_manager_live_count(manager) print "dead size:", sdd.sdd_manager_dead_count(manager) print "garbage collecting..." sdd.sdd_manager_garbage_collect(manager)