def is_monotonic(alpha, mgr, num_features, constraint_sdd):
    counterexample = [[None, None] for _ in xrange(num_features)]
    for i in xrange(num_features):
        beta1 = sdd.sdd_condition(i + 1, alpha, mgr)     # f|x_i
        beta2 = sdd.sdd_condition(-(i + 1), alpha, mgr)  # f|!x_i
        beta3 = sdd.sdd_conjoin(beta1, beta2, mgr)       # (unused)

        # check if f|x does not entail f|!x
        gamma = sdd.sdd_conjoin(
            sdd.sdd_conjoin(sdd.sdd_negate(beta2, mgr), beta1, mgr),
            constraint_sdd, mgr)
        # gamma is unsatisfiable when the entailment holds, so default to None
        model = next(models.models(gamma, sdd.sdd_manager_vtree(mgr)), None)
        if model is not None:
            counterexample[i][0] = [v for _, v in sorted(model.items())]
            counterexample[i][0][i] = 1

        # check if f|!x does not entail f|x
        gamma = sdd.sdd_conjoin(
            sdd.sdd_conjoin(sdd.sdd_negate(beta1, mgr), beta2, mgr),
            constraint_sdd, mgr)
        model = next(models.models(gamma, sdd.sdd_manager_vtree(mgr)), None)
        if model is not None:
            counterexample[i][1] = [v for _, v in sorted(model.items())]
            counterexample[i][1][i] = 0

    # monotonicity fails only if some feature has counterexamples in both directions
    for c in counterexample:
        if c[0] and c[1]:
            return False, counterexample
    return True, counterexample
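# Illustrative usage sketch (not part of the original module): x1 v x2 is
# monotone in both variables, so is_monotonic should report True. The trivial
# constraint TRUE stands in for a real feature-space constraint.
def _demo_is_monotonic():
    vtree = sdd.sdd_vtree_new(2, "balanced")
    mgr = sdd.sdd_manager_new(vtree)
    alpha = sdd.sdd_disjoin(sdd.sdd_manager_literal(1, mgr),
                            sdd.sdd_manager_literal(2, mgr), mgr)
    constraint = sdd.sdd_manager_true(mgr)
    monotone, counterexample = is_monotonic(alpha, mgr, 2, constraint)
    print "monotone?", monotone
    sdd.sdd_manager_free(mgr)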
def test_admission():
    var_count = 4
    vtree = sdd.sdd_vtree_new(var_count, "balanced")
    mgr = sdd.sdd_manager_new(vtree)

    # WFEG
    # ( w ^ g )
    alpha = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                            sdd.sdd_manager_literal(4, mgr), mgr)
    # ( w ^ f ^ e )
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                           sdd.sdd_manager_literal(2, mgr), mgr)
    beta = sdd.sdd_conjoin(beta, sdd.sdd_manager_literal(3, mgr), mgr)
    # ( f ^ e ^ g )
    gamma = sdd.sdd_conjoin(sdd.sdd_manager_literal(2, mgr),
                            sdd.sdd_manager_literal(3, mgr), mgr)
    gamma = sdd.sdd_conjoin(gamma, sdd.sdd_manager_literal(4, mgr), mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, gamma, mgr)
    alpha = sdd.sdd_negate(alpha, mgr)

    beta, pmgr = primes(alpha, mgr)
    _sanity_check(alpha, mgr, beta, pmgr)

    vtree = sdd.sdd_manager_vtree(mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)

    import models
    for model in models.models(alpha, vtree):
        print models.str_model(model)
    for model in models.models(beta, pvtree):
        print models.str_model(model)

    for model in models.models(alpha, vtree):
        print "==", models.str_model(model)
        model_list = [model[var] for var in sorted(model.keys())]
        gamma, pmgr = compatible_primes(alpha, model_list, mgr,
                                        primes_mgr=(beta, pmgr))
        pvtree = sdd.sdd_manager_vtree(pmgr)
        for prime_model in models.models(gamma, pvtree):
            print models.str_model(prime_model)
            term = prime_to_dict(prime_model, var_count)
            print " ".join([("*" if var not in term else
                             "+" if term[var] == 1 else "-")
                            for var in xrange(1, var_count + 1)])

    print "dead-nodes:", sdd.sdd_manager_dead_count(mgr)
    print "dead-nodes:", sdd.sdd_manager_dead_count(pmgr)
def primes_by_length(primes, pmgr, var_count):
    by_length = defaultdict(list)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for model in models.models(primes, pvtree):
        term = prime_to_dict(model, var_count)
        by_length[len(term)].append(term)
    return by_length
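# Illustrative usage sketch: bucket the primes of alpha by how many literals
# they mention, reusing primes(alpha, mgr) as the tests in this file do.
def _demo_primes_by_length(alpha, mgr, var_count):
    beta, pmgr = primes(alpha, mgr)
    by_length = primes_by_length(beta, pmgr, var_count)
    for length in sorted(by_length):
        print "primes with %d literals: %d" % (length, len(by_length[length]))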
def run_prime_implicant_query(alpha, mgr, num_features, models_list):
    for model_list in models_list:
        gamma, pmgr, pvtree2 = primes_given_term(alpha, model_list, mgr,
                                                 _primes_one_given_term)
        pvtree = sdd.sdd_manager_vtree(pmgr)
        pi_str = []
        gamma = sdd.sdd_global_minimize_cardinality(gamma, pmgr)
        for prime_model in models.models(gamma, pvtree):
            try:
                term = prime_to_dict(prime_model, num_features)
                term_str = " ".join([("*" if var not in term else
                                      "1" if term[var] == 1 else "0")
                                     for var in xrange(1, num_features + 1)])
                pi_str.append(term_str)
            except KeyError:
                pi_str = ["Key error. Make sure the instance is a model of the SDD."]
        # most general explanations (most don't-cares) first
        pi_str.sort(key=lambda x: x.count('*'), reverse=True)
        print "Model: " + str(model_list)
        print "PI explanations:"
        for pi in pi_str[:3]:
            print str(pi)
        sdd.sdd_vtree_free(pvtree2)
        sdd.sdd_manager_free(pmgr)
def __getstate__(self):
    tempfile = mktempfile()
    vtree = sdd.sdd_manager_vtree(self.get_manager())
    sdd.sdd_vtree_save(tempfile, vtree)
    with open(tempfile) as f:
        vtree_data = f.read()
    nodes = []
    for n in self.nodes:
        if n is not None:
            sdd.sdd_save(tempfile, n)
            with open(tempfile) as f:
                nodes.append(f.read())
        else:
            nodes.append(None)
    sdd.sdd_save(tempfile, self.constraint_dd)
    with open(tempfile) as f:
        constraint_dd = f.read()
    os.remove(tempfile)
    return {
        'varcount': self.varcount,
        'nodes': nodes,
        'vtree': vtree_data,
        'constraint_dd': constraint_dd
    }
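# A possible __setstate__ counterpart (a sketch; the original file only shows
# __getstate__). It assumes the dict produced above, that the saved strings
# can be written back to a temp file and re-read with sdd.sdd_vtree_read /
# sdd.sdd_read, and that `self.manager` is the attribute behind get_manager().
def __setstate__(self, state):
    tempfile = mktempfile()
    with open(tempfile, 'w') as f:
        f.write(state['vtree'])
    vtree = sdd.sdd_vtree_read(tempfile)
    self.manager = sdd.sdd_manager_new(vtree)  # assumed attribute name
    self.varcount = state['varcount']
    self.nodes = []
    for node_data in state['nodes']:
        if node_data is None:
            self.nodes.append(None)
        else:
            with open(tempfile, 'w') as f:
                f.write(node_data)
            self.nodes.append(sdd.sdd_read(tempfile, self.manager))
    with open(tempfile, 'w') as f:
        f.write(state['constraint_dd'])
    self.constraint_dd = sdd.sdd_read(tempfile, self.manager)
    os.remove(tempfile)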
def parse_bdd(filename):
    var_count, node_count = pre_parse_bdd(filename)
    print " zdd var count:", var_count
    print " zdd node count:", node_count

    manager = start_manager(var_count, range(1, var_count + 1))
    root = sdd.sdd_manager_vtree(manager)

    nodes = [None] * (node_count + 1)
    index, id2index = 1, {}
    f = open(filename)
    for line in f.readlines():
        if line.startswith("."):
            break
        line = line.strip().split()
        nid = int(line[0])
        dvar = int(line[1])
        lo, hi = line[2], line[3]
        hi_lit = sdd.sdd_manager_literal(dvar, manager)
        lo_lit = sdd.sdd_manager_literal(-dvar, manager)
        if lo == 'T':
            lo_sdd, lo_vtree = sdd.sdd_manager_true(manager), None
        elif lo == 'B':
            lo_sdd, lo_vtree = sdd.sdd_manager_false(manager), None
        else:
            lo_id = int(lo)
            lo_sdd, lo_vtree = nodes[id2index[lo_id]]
        if hi == 'T':
            hi_sdd, hi_vtree = sdd.sdd_manager_true(manager), None
        elif hi == 'B':
            hi_sdd, hi_vtree = sdd.sdd_manager_false(manager), None
        else:
            hi_id = int(hi)
            hi_sdd, hi_vtree = nodes[id2index[hi_id]]
        #v1,v2 = sdd.sdd_vtree_of(hi_lit),sdd.sdd_vtree_of(hi_sdd)
        #vt = sdd.sdd_vtree_lca(v1,v2,root)
        vt = sdd.sdd_manager_vtree_of_var(dvar, manager)
        vt = sdd.sdd_vtree_parent(vt)
        vt = sdd.sdd_vtree_right(vt)
        if dvar < var_count:
            hi_sdd = zero_normalize_sdd(hi_sdd, hi_vtree, vt, manager)
            lo_sdd = zero_normalize_sdd(lo_sdd, lo_vtree, vt, manager)
            vt = sdd.sdd_vtree_parent(vt)
        hi_sdd = sdd.sdd_conjoin(hi_lit, hi_sdd, manager)
        lo_sdd = sdd.sdd_conjoin(lo_lit, lo_sdd, manager)
        alpha = sdd.sdd_disjoin(hi_sdd, lo_sdd, manager)
        nodes[index] = (alpha, vt)
        id2index[nid] = index
        index += 1
    f.close()
    return manager, nodes[-1][0]
def GetLocalConstraintsForRoot(self, file_prefix):
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename = "%s/%s_then_sdd.sdd" % (file_prefix, self.name)
    constraint = {}
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = [then_sdd_filename]

    universe = []
    # internal edges
    for sub_region_edge_tup in self.sub_region_edges:
        universe.append(sub_region_edge_tup)
    GraphSet.set_universe(universe)
    universe = GraphSet.universe()

    paths = GraphSet()
    child_names = self.children.keys()
    for (i, j) in itertools.combinations(child_names, 2):
        paths = paths.union(GraphSet.paths(i, j))

    name_to_sdd_index = {}
    zdd_to_sdd_index = [None]  # for generating sdd from graphset
    sdd_index = 0
    for child in child_names:
        sdd_index += 1
        name_to_sdd_index["c%s" % child] = sdd_index
    for sub_region_edge in universe:
        corresponding_network_edges = self.sub_region_edges[sub_region_edge]
        corresponding_network_edges_sdd_index = []
        for single_edge in corresponding_network_edges:
            sdd_index += 1
            name_to_sdd_index[str(single_edge)] = sdd_index
            corresponding_network_edges_sdd_index.append(sdd_index)
        zdd_to_sdd_index.append(corresponding_network_edges_sdd_index)
    constraint["then_variable_mapping"] = name_to_sdd_index

    rl_vtree = sdd.sdd_vtree_new(sdd_index, "right")
    sdd_manager = sdd.sdd_manager_new(rl_vtree)
    sdd.sdd_vtree_free(rl_vtree)
    sdd.sdd_manager_auto_gc_and_minimize_off(sdd_manager)

    # construct the simple-path constraint
    simple_path_constraint = generate_sdd_from_graphset(
        paths, sdd_manager, zdd_to_sdd_index)
    # non-empty path in this region map
    none_of_child = sdd.util.sdd_negative_term(
        sdd_manager,
        [name_to_sdd_index["c%s" % child] for child in self.children])
    case_one = sdd.sdd_conjoin(none_of_child, simple_path_constraint,
                               sdd_manager)
    # empty path in this region map
    exactly_one_child = sdd.util.sdd_exactly_one(
        sdd_manager,
        [name_to_sdd_index["c%s" % child] for child in self.children])
    empty_path_constraint = sdd.util.sdd_negative_term(
        sdd_manager, sum(zdd_to_sdd_index[1:], []))
    case_two = sdd.sdd_conjoin(exactly_one_child, empty_path_constraint,
                               sdd_manager)
    total_constraint = sdd.sdd_disjoin(case_one, case_two, sdd_manager)

    sdd.sdd_save(then_sdd_filename, total_constraint)
    sdd.sdd_vtree_save(then_vtree_filename,
                       sdd.sdd_manager_vtree(sdd_manager))
    sdd.sdd_manager_free(sdd_manager)
    return constraint
def run_mincard_query(alpha, mgr, num_features, models_list):
    for model in models_list:
        beta = condition_and_minimize(alpha, mgr, num_features, model)
        vtree = sdd.sdd_manager_vtree(mgr)
        print "Model: ", model
        print "MC Explanations: "
        # use a distinct loop variable so the query model is not shadowed
        for mc_model in sdd.models.models(beta, vtree):
            print sdd.models.str_model(mc_model)
def test():
    var_count = 4
    vtree = sdd.sdd_vtree_new(var_count, "balanced")
    mgr = sdd.sdd_manager_new(vtree)

    # A v B
    alpha = sdd.sdd_disjoin(sdd.sdd_manager_literal(1, mgr),
                            sdd.sdd_manager_literal(2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-3, mgr),
                           sdd.sdd_manager_literal(-4, mgr), mgr)
    # A v B v ( ~C ^ ~D )
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)

    beta, pmgr = primes(alpha, mgr)
    _sanity_check(alpha, mgr, beta, pmgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)

    import models
    #beta2 = sdd.sdd_global_minimize_cardinality(beta,pmgr)
    beta2 = beta
    for model in models.models(beta2, pvtree):
        print models.str_model(model)

    global cache_hits
    print "cache-hits:", cache_hits

    print "all-ones"
    beta, pmgr = compatible_primes(alpha, [1, 1, 1, 1], mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for model in models.models(beta, pvtree):
        print models.str_model(model)

    print "all-zeros"
    beta, pmgr = compatible_primes(alpha, [0, 0, 0, 0], mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for model in models.models(beta, pvtree):
        print models.str_model(model)

    print "blah"
    beta, pmgr = compatible_primes(alpha, [1, 0, 1, 0], mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for model in models.models(beta, pvtree):
        print models.str_model(model)

    print "dead-nodes:", sdd.sdd_manager_dead_count(mgr)
    print "dead-nodes:", sdd.sdd_manager_dead_count(pmgr)
def print_grids(alpha, m, n, g, manager):
    from inf import models
    var_count = m*(n-1) + (m-1)*n
    #print "COUNT:", sdd.sdd_model_count(alpha,manager)
    print "COUNT:", global_model_count(alpha, manager)
    for model in models.models(alpha, sdd.sdd_manager_vtree(manager)):
        print models.str_model(model, var_count=var_count)
        draw_grid(model, m, n, g, True)
def enumerate_primes(primes, pmgr, var_count):
    """Yield the primes as terms, in order of increasing cardinality."""
    pvtree = sdd.sdd_manager_vtree(pmgr)
    while not sdd.sdd_node_is_false(primes):
        mincard = sdd.sdd_global_minimize_cardinality(primes, pmgr)
        for model in models.models(mincard, pvtree):
            term = prime_to_dict(model, var_count)
            yield term
        # remove the primes just enumerated and continue with the rest
        primes = sdd.sdd_conjoin(primes, sdd.sdd_negate(mincard, pmgr), pmgr)
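# Illustrative usage sketch: print every prime of alpha, shortest first;
# variables a prime does not mention are printed as don't-cares ("*").
def _demo_enumerate_primes(alpha, mgr, var_count):
    beta, pmgr = primes(alpha, mgr)
    for term in enumerate_primes(beta, pmgr, var_count):
        print " ".join([("*" if var not in term else str(term[var]))
                        for var in xrange(1, var_count + 1)])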
def test_andy():
    var_count = 3
    vtree = sdd.sdd_vtree_new(var_count, "balanced")
    mgr = sdd.sdd_manager_new(vtree)

    # models: 100, 101, 111, 001, 011
    alpha = sdd.sdd_manager_false(mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                           sdd.sdd_manager_literal(-2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-3, mgr), beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                           sdd.sdd_manager_literal(-2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                           sdd.sdd_manager_literal(2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-1, mgr),
                           sdd.sdd_manager_literal(-2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-1, mgr),
                           sdd.sdd_manager_literal(2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)

    beta, pmgr = primes(alpha, mgr)
    _sanity_check(alpha, mgr, beta, pmgr)

    vtree = sdd.sdd_manager_vtree(mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)

    import models
    for model in models.models(alpha, vtree):
        print models.str_model(model)
    for model in models.models(beta, pvtree):
        print models.str_model(model)

    print "dead-nodes:", sdd.sdd_manager_dead_count(mgr)
    print "dead-nodes:", sdd.sdd_manager_dead_count(pmgr)
def PI(sdd_filename, vtree_filename, num_features, model_list):
    vtree = sdd.sdd_vtree_new(num_features, "right")
    mgr = sdd.sdd_manager_new(vtree)
    vtree = sdd.sdd_manager_vtree(mgr)
    alpha = sdd.sdd_read(sdd_filename, mgr)
    sdd.sdd_vtree_save(vtree_filename, vtree)
    print "-----Begin PI query-----"
    explqs.run_prime_implicant_query(alpha, mgr, num_features, model_list)
    print "-----End PI query-----\n"
def print_grids(alpha, dimension, manager):
    #import pdb; pdb.set_trace()
    from inf import models
    var_count = 2*dimension*(dimension-1)
    #var_count = 2*dimension*(dimension-1) + dimension*dimension
    #var_count = 2*dimension[0]*dimension[1] - dimension[0] - dimension[1]
    print "COUNT:", sdd.sdd_model_count(alpha, manager)
    for model in models.models(alpha, sdd.sdd_manager_vtree(manager)):
        print models.str_model(model, var_count=var_count)
        draw_grid(model, dimension)
def _sanity_check(f, mgr, g, pmgr):
    """f is the original function; g is the SDD over its prime implicants."""
    alpha = sdd.sdd_manager_false(mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for prime in models.models(g, pvtree):
        term = prime_to_term(prime, mgr)
        beta = sdd.sdd_conjoin(term, f, mgr)
        assert term == beta  # the term entails f, i.e. it is an implicant
        assert _is_prime(prime, f, mgr)
        alpha = sdd.sdd_disjoin(alpha, term, mgr)
    mc1 = sdd.sdd_global_model_count(f, mgr)
    mc2 = sdd.sdd_global_model_count(alpha, mgr)
    print "mc-check:", mc1, mc2, ("ok" if mc1 == mc2 else "NOT OK")
    assert mc1 == mc2
    assert alpha == f  # the primes cover f exactly
def run():
    vtree = sdd.sdd_vtree_read(vtree_filename)
    mgr = sdd.sdd_manager_new(vtree)
    vtree = sdd.sdd_manager_vtree(mgr)
    alpha = sdd.sdd_read(sdd_filename, mgr)

    with open(variable_description_filename) as f:
        variable_description = f.readlines()
    num_features = int(variable_description[0].strip().split(" ")[1])

    # custom instances can be specified directly, e.g.
    #   model_list = [[0,0,0,0],[0,0,0,1],[0,0,1,0],...]
    # here we enumerate a few positive instances from alpha instead
    model_list = get_model_list(alpha, vtree, 10)
    PI(alpha, mgr, num_features, model_list)
def sdd_exactly_one_among(manager, active_variables, background_variables):
    if not all(x in background_variables for x in active_variables):
        raise Exception(
            "Invalid arguments: active variables %s, background variables %s"
            % (active_variables, background_variables))
    result = sdd.sdd_manager_false(manager)
    for positive_variable in active_variables:
        cur_term = sdd.sdd_manager_true(manager)
        for variable in background_variables:
            if variable != positive_variable:
                cur_lit = sdd.sdd_manager_literal(-variable, manager)
            else:
                cur_lit = sdd.sdd_manager_literal(variable, manager)
            cur_term = sdd.sdd_conjoin(cur_term, cur_lit, manager)
        # debug dumps (left over; they rewrite the same files every iteration)
        sdd.sdd_save("t1.sdd", result)
        sdd.sdd_save("t2.sdd", cur_term)
        sdd.sdd_vtree_save("manager.vtree", sdd.sdd_manager_vtree(manager))
        result = sdd.sdd_disjoin(result, cur_term, manager)
    return result
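# Illustrative usage sketch: over background variables 1..4, exactly one of
# {1, 2} is true and every other background variable is false, so the
# resulting SDD has exactly two models: 1000 and 0100.
def _demo_sdd_exactly_one_among():
    vtree = sdd.sdd_vtree_new(4, "right")
    manager = sdd.sdd_manager_new(vtree)
    alpha = sdd_exactly_one_among(manager, [1, 2], [1, 2, 3, 4])
    print "model count:", sdd.sdd_model_count(alpha, manager)  # expects 2
    sdd.sdd_manager_free(manager)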
def convert_obdd_to_sdd(output_filename, documentation_filename):
    with open(output_filename, 'r') as f:
        nodes = f.readlines()[1:]
    with open(documentation_filename, 'r') as f:
        num_variables = int(f.readline().split(' ')[1])
    nodes = [x.strip().split(' ') for x in nodes]
    nodes = [[int(x) if x.isdigit() else x for x in node] for node in nodes]
    node_dict = {}
    for l in nodes:
        node_dict[l[0]] = l[1:]
    #print node_dict
    vtree = sdd.sdd_vtree_new(num_variables, "right")
    mgr = sdd.sdd_manager_new(vtree)
    vtree = sdd.sdd_manager_vtree(mgr)
    root = 0
    return convert_helper(root, mgr, node_dict, {}, 0), vtree, mgr
def convert(filename):
    start = time.time()
    manager, alpha = orig.parse_bdd(filename + ".zdd")
    end = time.time()
    print " sdd node count: %d" % sdd.sdd_count(alpha)
    print " sdd size: %d" % sdd.sdd_size(alpha)
    print " sdd model count: %d" % sdd.sdd_model_count(alpha, manager)
    print " global model count: %d" % orig.global_model_count(alpha, manager)
    print " read bdd time: %.3fs" % (end - start)

    sdd.sdd_save(filename + ".sdd", alpha)
    #sdd.sdd_save_as_dot(filename + ".sdd.dot", alpha)
    vtree = sdd.sdd_manager_vtree(manager)
    sdd.sdd_vtree_save(filename + ".vtree", vtree)
    #sdd.sdd_vtree_save_as_dot(filename + ".vtree.dot", vtree)

    print "===================="
    print "before garbage collecting..."
    print "live size:", sdd.sdd_manager_live_count(manager)
    print "dead size:", sdd.sdd_manager_dead_count(manager)
    print "garbage collecting..."
    sdd.sdd_manager_garbage_collect(manager)
    print "live size:", sdd.sdd_manager_live_count(manager)
    print "dead size:", sdd.sdd_manager_dead_count(manager)
print " global model count: %d" % global_model_count(alphaNoMP,managerNoMP) print " read bdd time: %.3fs" % (end-start) """ sdd.sdd_ref(alpha,manager) start = time.time() sdd.sdd_manager_minimize(manager) end = time.time() print " min sdd node count: %d" % sdd.sdd_count(alpha) print " min sdd time: %.3fs" % (end-start) sdd.sdd_deref(alpha,manager) """ sdd.sdd_save(filename + ".sdd",alpha) #sdd.sdd_save_as_dot(filename +".sdd.dot",alpha) vtree = sdd.sdd_manager_vtree(manager) sdd.sdd_vtree_save(filename + ".vtree",vtree) #sdd.sdd_vtree_save_as_dot(filename +".vtree.dot",vtree) sdd.sdd_save(filenameNoMP + ".sdd",alphaNoMP) vtreeNoMP = sdd.sdd_manager_vtree(managerNoMP) sdd.sdd_vtree_save(filenameNoMP + ".vtree",vtreeNoMP) print "====================" print "before garbage collecting..." print "live size:", sdd.sdd_manager_live_count(manager) print "dead size:", sdd.sdd_manager_dead_count(manager) print "garbage collecting..."
def parse_bdd(filename, var_count=None):
    if var_count is None:
        var_count, node_count = pre_parse_bdd(filename)
    else:
        max_count, node_count = pre_parse_bdd(filename)
    #print " zdd var count:", var_count
    #print " zdd node count:", node_count

    manager = start_manager(var_count, range(1, var_count + 1))
    root = sdd.sdd_manager_vtree(manager)

    nodes = [None] * (node_count + 1)
    index, id2index = 1, {}
    f = open(filename)
    for line in f.readlines():
        if line.startswith("."):
            break
        line = line.strip().split()
        nid = int(line[0])
        dvar = int(line[1])
        lo, hi = line[2], line[3]
        hi_lit = sdd.sdd_manager_literal(dvar, manager)
        lo_lit = sdd.sdd_manager_literal(-dvar, manager)
        if lo == 'T':
            lo_sdd, lo_vtree = sdd.sdd_manager_true(manager), None
        elif lo == 'B':
            lo_sdd, lo_vtree = sdd.sdd_manager_false(manager), None
        else:
            lo_id = int(lo)
            lo_sdd, lo_vtree = nodes[id2index[lo_id]]
        if hi == 'T':
            hi_sdd, hi_vtree = sdd.sdd_manager_true(manager), None
        elif hi == 'B':
            hi_sdd, hi_vtree = sdd.sdd_manager_false(manager), None
        else:
            hi_id = int(hi)
            hi_sdd, hi_vtree = nodes[id2index[hi_id]]
        #v1,v2 = sdd.sdd_vtree_of(hi_lit),sdd.sdd_vtree_of(hi_sdd)
        #vt = sdd.sdd_vtree_lca(v1,v2,root)
        vt = sdd.sdd_manager_vtree_of_var(dvar, manager)
        vt = sdd.sdd_vtree_parent(vt)
        vt = sdd.sdd_vtree_right(vt)
        if dvar < var_count:
            hi_sdd = zero_normalize_sdd(hi_sdd, hi_vtree, vt, manager)
            lo_sdd = zero_normalize_sdd(lo_sdd, lo_vtree, vt, manager)
            vt = sdd.sdd_vtree_parent(vt)
        hi_sdd = sdd.sdd_conjoin(hi_lit, hi_sdd, manager)
        lo_sdd = sdd.sdd_conjoin(lo_lit, lo_sdd, manager)
        alpha = sdd.sdd_disjoin(hi_sdd, lo_sdd, manager)
        nodes[index] = (alpha, vt)
        id2index[nid] = index
        index += 1
    f.close()

    last_sdd, last_vtree = nodes[-1]
    vt = sdd.sdd_manager_vtree(manager)
    if vt != last_vtree:
        last_sdd = zero_normalize_sdd(last_sdd, last_vtree, vt, manager)
    return manager, last_sdd
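# Illustrative usage sketch (filenames are placeholders): parse a ZDD file,
# report its model count, and save the resulting SDD and vtree. Passing an
# explicit var_count pads the manager beyond the variables in the file.
def _demo_parse_bdd():
    manager, alpha = parse_bdd("example.zdd")
    print "model count:", sdd.sdd_model_count(alpha, manager)
    sdd.sdd_save("example.sdd", alpha)
    sdd.sdd_vtree_save("example.vtree", sdd.sdd_manager_vtree(manager))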
def run():
    with timer.Timer("reading dataset"):
        dataset = util.read_binary_dataset(test_filename)
        domain = util.read_header(test_filename)
        '''
        if OPTIONS.majority_circuit_opt:
            l = len(domain)
            for k in xrange(num_trees):
                domain["Tree_%d" % k] = l+k
        '''

    with timer.Timer("initializing manager"):
        # start sdd manager
        var_count = len(domain) - 1
        vtree = sdd.sdd_vtree_new(var_count, "balanced")
        manager = sdd.sdd_manager_new(vtree)
        #sdd.sdd_manager_auto_gc_and_minimize_on(manager)
        #sdd.sdd_manager_auto_gc_and_minimize_off(manager)
        sdd_state = SddState(vtree, manager)

    with timer.Timer("reading constraints"):
        constraint_sdd, constraint_info = encode_logical_constraints(
            constraint_filename, manager, domain)
        sdd.sdd_ref(constraint_sdd, manager)

    with timer.Timer("reading trees"):
        tree_states = []
        for filename in sorted(glob.glob(tree_basename.replace('%d', '*'))):
            tree = pygv.AGraph(filename)
            tree_state = TreeState(tree, domain, constraint_info)
            tree_states.append(tree_state)
            #tree.layout(prog='dot')
            #tree.draw(filename+".png")
        #num_trees = len(tree_states)

    with timer.Timer("compiling trees"):
        forest_sdds, _ = izip(*forest_sdds_iter(tree_states, sdd_state))
        #forest_sdds = list(forest_sdds_iter(tree_states,sdd_state))
        forest_sdds = [(tree_state, tree_sdd) for tree_state, tree_sdd
                       in zip(tree_states, forest_sdds)]
        cmpf = lambda x, y: cmp(sdd.sdd_size(x[1]), sdd.sdd_size(y[1]))
        forest_sdds.sort(cmp=cmpf)
        tree_states = [tree_state for tree_state, tree_sdd in forest_sdds]

        #ACACAC
        sdd.sdd_manager_auto_gc_and_minimize_off(manager)
        sdd.sdd_manager_minimize_limited(manager)
        stats = SddSizeStats()
        for tree_state, tree_sdd in forest_sdds:
            stats.update(tree_sdd)
            sdd.sdd_deref(tree_sdd, manager)
        sdd.sdd_manager_garbage_collect(manager)
        forest_sdds, used_vars_list = izip(
            *forest_sdds_iter(tree_states, sdd_state))
    print stats

    with timer.Timer("compiling all", prefix="| "):
        alpha = compile_all(forest_sdds, used_vars_list, num_trees, domain,
                            manager, constraint_sdd)

    with timer.Timer("evaluating"):
        msg = util.evaluate_dataset_all_sdd(dataset, alpha, manager)
        print "| trees : %d" % num_trees
        print "--- evaluating majority vote on random forest (compiled):"
        print msg
    print "| all size :", sdd.sdd_size(alpha)
    print "| all count:", sdd.sdd_count(alpha)
    print " model count:", sdd.sdd_global_model_count(alpha, manager)

    with timer.Timer("checking monotonicity"):
        result = is_monotone(alpha, manager)
        print "Is monotone?", result

    #for tree_sdd in forest_sdds: sdd.sdd_deref(tree_sdd,manager)
    print "===================="
    print "before garbage collecting..."
    print "live size:", sdd.sdd_manager_live_count(manager)
    print "dead size:", sdd.sdd_manager_dead_count(manager)
    print "garbage collecting..."
    sdd.sdd_manager_garbage_collect(manager)
    print "live size:", sdd.sdd_manager_live_count(manager)
    print "dead size:", sdd.sdd_manager_dead_count(manager)

    vtree = sdd.sdd_manager_vtree(manager)
    print "Writing sdd file %s and vtree file %s" % (sdd_filename,
                                                     vtree_filename)
    sdd.sdd_save(sdd_filename, alpha)
    sdd.sdd_vtree_save(vtree_filename, vtree)
    print "Writing constraint sdd file %s and constraint vtree file %s" % (
        constraint_sdd_filename, constraint_vtree_filename)
    sdd.sdd_save(constraint_sdd_filename, constraint_sdd)
    sdd.sdd_vtree_save(constraint_vtree_filename, vtree)
def str_model(model, var_count=None):
    """Convert model to string."""
    if var_count is None:
        var_count = len(model)
    return " ".join(str(model[var]) for var in xrange(1, var_count + 1))


if __name__ == '__main__':
    var_count = 10
    vtree = sdd.sdd_vtree_new(var_count, "balanced")
    manager = sdd.sdd_manager_new(vtree)

    # alpha = ~1 v ~2 v ... v ~10
    alpha = sdd.sdd_manager_false(manager)
    for var in xrange(1, var_count + 1):
        lit = sdd.sdd_manager_literal(-var, manager)
        alpha = sdd.sdd_disjoin(alpha, lit, manager)

    vt = sdd.sdd_manager_vtree(manager)
    model_count = 0
    for model in models(alpha, vt):
        model_count += 1
        print str_model(model, var_count=var_count)
    #lib_mc = sdd.sdd_model_count(alpha,manager)
    print "model count: %d" % model_count

    sdd.sdd_manager_free(manager)
    sdd.sdd_vtree_free(vtree)
def GetLocalConstraintsForLeaveClusters(self, file_prefix):
    if_vtree_filename = "%s/%s_if_vtree.vtree" % (file_prefix, self.name)
    if_sdd_filename_prefix = "%s/%s_if_sdd" % (file_prefix, self.name)
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename_prefix = "%s/%s_then_sdd" % (file_prefix, self.name)
    ifs = []
    thens = []

    if_variable_mapping = {}
    if_sdd_index = 0
    if_sdd_index += 1
    # cluster indicator for the current cluster
    if_variable_mapping["c%s" % self.name] = if_sdd_index
    for external_edge in self.external_edges:
        if_sdd_index += 1
        if_variable_mapping[str(external_edge)] = if_sdd_index

    then_variable_mapping = {}
    zdd_to_sdd_index = [None]
    universe = []
    node_pair_to_edges = {}
    for internal_edge in self.internal_edges:
        if (internal_edge.x, internal_edge.y) not in node_pair_to_edges:
            universe.append((internal_edge.x, internal_edge.y))
        node_pair_to_edges.setdefault((internal_edge.x, internal_edge.y),
                                      []).append(internal_edge)
    GraphSet.set_universe(universe)
    universe = GraphSet.universe()
    then_sdd_index = 0
    for node_pair in universe:
        corresponding_sdd_indexes = []
        for internal_edge in node_pair_to_edges[node_pair]:
            then_sdd_index += 1
            then_variable_mapping[str(internal_edge)] = then_sdd_index
            corresponding_sdd_indexes.append(then_sdd_index)
        zdd_to_sdd_index.append(corresponding_sdd_indexes)

    if_vtree = sdd.sdd_vtree_new(if_sdd_index, "right")
    then_vtree = sdd.sdd_vtree_new(then_sdd_index, "right")
    if_manager = sdd.sdd_manager_new(if_vtree)
    then_manager = sdd.sdd_manager_new(then_vtree)
    sdd.sdd_manager_auto_gc_and_minimize_off(if_manager)
    sdd.sdd_manager_auto_gc_and_minimize_off(then_manager)
    sdd.sdd_vtree_free(if_vtree)
    sdd.sdd_vtree_free(then_vtree)

    # none of the external edges is used and the cluster indicator is off
    case_index = 0
    case_one_if = sdd.util.sdd_negative_term(if_manager,
                                             range(1, if_sdd_index + 1))
    case_one_then = sdd.util.sdd_negative_term(then_manager,
                                               range(1, then_sdd_index + 1))
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_one_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                 case_one_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))

    # none of the external edges is used and the cluster indicator is on
    case_index += 1
    case_two_if = sdd.util.sdd_exactly_one_among(
        if_manager, [if_variable_mapping["c%s" % self.name]],
        range(1, if_sdd_index + 1))
    paths = GraphSet()
    for (i, j) in itertools.combinations(self.nodes, 2):
        paths = paths.union(GraphSet.paths(i, j))
    case_two_then = generate_sdd_from_graphset(paths, then_manager,
                                               zdd_to_sdd_index)
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_two_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                 case_two_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))

    # exactly one of the external edges is used and the cluster indicator is off
    aggregated_cases = {}
    for external_edge in self.external_edges:
        aggregated_cases.setdefault(self.external_edges[external_edge],
                                    []).append(external_edge)
    for entering_node in aggregated_cases:
        case_index += 1
        cur_case_if = sdd.util.sdd_exactly_one_among(
            if_manager,
            [if_variable_mapping[str(e)]
             for e in aggregated_cases[entering_node]],
            range(1, if_sdd_index + 1))
        paths = GraphSet()
        for node in self.nodes:
            if node == entering_node:
                continue
            paths = paths.union(GraphSet.paths(entering_node, node))
        cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                   zdd_to_sdd_index)
        # disjoin the empty path
        cur_case_then = sdd.sdd_disjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(then_manager,
                                       range(1, then_sdd_index + 1)),
            then_manager)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index),
                     cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                     cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))

    # exactly two of the external edges are used and the cluster indicator is off
    aggregated_cases = {}
    for (i, j) in itertools.combinations(self.external_edges.keys(), 2):
        entering_points = (self.external_edges[i], self.external_edges[j])
        entering_points = (max(entering_points), min(entering_points))
        aggregated_cases.setdefault(entering_points, []).append((i, j))
    for entering_points in aggregated_cases:
        case_index += 1
        entering_edges = aggregated_cases[entering_points]
        cur_case_if = generate_exactly_two_from_tuples(
            if_manager,
            [(if_variable_mapping[str(e1)], if_variable_mapping[str(e2)])
             for (e1, e2) in entering_edges],
            range(1, if_sdd_index + 1))
        if entering_points[0] == entering_points[1]:
            cur_case_then = sdd.util.sdd_negative_term(
                then_manager, range(1, then_sdd_index + 1))
        else:
            paths = GraphSet.paths(entering_points[0], entering_points[1])
            cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                       zdd_to_sdd_index)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index),
                     cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                     cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))

    sdd.sdd_vtree_save(if_vtree_filename, sdd.sdd_manager_vtree(if_manager))
    sdd.sdd_vtree_save(then_vtree_filename,
                       sdd.sdd_manager_vtree(then_manager))
    sdd.sdd_manager_free(if_manager)
    sdd.sdd_manager_free(then_manager)

    constraint = {}
    constraint["if_vtree"] = if_vtree_filename
    constraint["if"] = ifs
    constraint["if_variable_mapping"] = if_variable_mapping
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = thens
    constraint["then_variable_mapping"] = then_variable_mapping
    return constraint
def GetLocalConstraintsForInternalClusters(self, file_prefix):
    if_vtree_filename = "%s/%s_if_vtree.vtree" % (file_prefix, self.name)
    if_sdd_filename_prefix = "%s/%s_if_sdd" % (file_prefix, self.name)
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename_prefix = "%s/%s_then_sdd" % (file_prefix, self.name)
    ifs = []
    thens = []

    if_variable_mapping = {}
    if_sdd_index = 0
    if_sdd_index += 1
    # cluster indicator for the current cluster
    if_variable_mapping["c%s" % self.name] = if_sdd_index
    for external_edge in self.external_edges:
        if_sdd_index += 1
        if_variable_mapping[str(external_edge)] = if_sdd_index

    then_variable_mapping = {}
    # variables for the child clusters
    then_sdd_index = 0
    zdd_to_sdd_index = [None]
    for child in self.children:
        then_sdd_index += 1
        then_variable_mapping["c%s" % child] = then_sdd_index
    universe = self.sub_region_edges.keys()
    GraphSet.set_universe(universe)
    universe = GraphSet.universe()
    for node_pair in universe:
        corresponding_sdd_indexes = []
        for internal_edge in self.sub_region_edges[node_pair]:
            then_sdd_index += 1
            then_variable_mapping[str(internal_edge)] = then_sdd_index
            corresponding_sdd_indexes.append(then_sdd_index)
        zdd_to_sdd_index.append(corresponding_sdd_indexes)

    if_vtree = sdd.sdd_vtree_new(if_sdd_index, "right")
    then_vtree = sdd.sdd_vtree_new(then_sdd_index, "right")
    if_manager = sdd.sdd_manager_new(if_vtree)
    then_manager = sdd.sdd_manager_new(then_vtree)
    sdd.sdd_manager_auto_gc_and_minimize_off(if_manager)
    sdd.sdd_manager_auto_gc_and_minimize_off(then_manager)
    sdd.sdd_vtree_free(if_vtree)
    sdd.sdd_vtree_free(then_vtree)

    # none of the external edges is used and the cluster indicator is off
    case_index = 0
    case_one_if = sdd.util.sdd_negative_term(if_manager,
                                             range(1, if_sdd_index + 1))
    case_one_then = sdd.util.sdd_negative_term(then_manager,
                                               range(1, then_sdd_index + 1))
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_one_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                 case_one_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))

    # none of the external edges is used and the cluster indicator is on
    case_index += 1
    case_two_if = sdd.util.sdd_exactly_one_among(
        if_manager, [if_variable_mapping["c%s" % self.name]],
        range(1, if_sdd_index + 1))
    # non-empty path in this region map
    none_of_child = sdd.util.sdd_negative_term(
        then_manager,
        [then_variable_mapping["c%s" % child] for child in self.children])
    paths = GraphSet()
    child_names = self.children.keys()
    for c1, c2 in itertools.combinations(child_names, 2):
        paths = paths.union(GraphSet.paths(c1, c2))
    simple_path_constraint = generate_sdd_from_graphset(paths, then_manager,
                                                        zdd_to_sdd_index)
    case_one = sdd.sdd_conjoin(simple_path_constraint, none_of_child,
                               then_manager)
    # empty path in this region map
    exactly_one_child = sdd.util.sdd_exactly_one(
        then_manager,
        [then_variable_mapping["c%s" % child] for child in self.children])
    empty_path_constraint = sdd.util.sdd_negative_term(
        then_manager, sum(zdd_to_sdd_index[1:], []))
    case_two = sdd.sdd_conjoin(empty_path_constraint, exactly_one_child,
                               then_manager)
    case_two_then = sdd.sdd_disjoin(case_one, case_two, then_manager)
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_two_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                 case_two_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))

    # exactly one of the external edges is used and the cluster indicator is off
    aggregated_cases = {}
    for external_edge in self.external_edges:
        aggregated_cases.setdefault(self.external_edges[external_edge],
                                    []).append(external_edge)
    for entering_node in aggregated_cases:
        case_index += 1
        cur_case_if = sdd.util.sdd_exactly_one_among(
            if_manager,
            [if_variable_mapping[str(e)]
             for e in aggregated_cases[entering_node]],
            range(1, if_sdd_index + 1))
        paths = GraphSet()
        for child in self.children:
            if child == entering_node:
                continue
            paths = paths.union(GraphSet.paths(entering_node, child))
        cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                   zdd_to_sdd_index)
        cur_case_then = sdd.sdd_disjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(
                then_manager,
                [then_variable_mapping[str(e)] for e in self.internal_edges]),
            then_manager)
        # conjoin that all the child indicators are off
        cur_case_then = sdd.sdd_conjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(
                then_manager,
                [then_variable_mapping["c%s" % child]
                 for child in self.children]),
            then_manager)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index),
                     cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                     cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))

    # exactly two of the external edges are used and the cluster indicator is off
    aggregated_cases = {}
    for (i, j) in itertools.combinations(self.external_edges.keys(), 2):
        entering_points = (self.external_edges[i], self.external_edges[j])
        entering_points = (max(entering_points), min(entering_points))
        aggregated_cases.setdefault(entering_points, []).append((i, j))
    for entering_points in aggregated_cases:
        case_index += 1
        entering_edges = aggregated_cases[entering_points]
        cur_case_if = generate_exactly_two_from_tuples(
            if_manager,
            [(if_variable_mapping[str(e1)], if_variable_mapping[str(e2)])
             for (e1, e2) in entering_edges],
            range(1, if_sdd_index + 1))
        if entering_points[0] == entering_points[1]:
            cur_case_then = sdd.util.sdd_negative_term(
                then_manager, range(1, then_sdd_index + 1))
        else:
            paths = GraphSet.paths(entering_points[0], entering_points[1])
            cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                       zdd_to_sdd_index)
        cur_case_then = sdd.sdd_conjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(
                then_manager,
                [then_variable_mapping["c%s" % child]
                 for child in self.children]),
            then_manager)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index),
                     cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index),
                     cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))

    sdd.sdd_vtree_save(if_vtree_filename, sdd.sdd_manager_vtree(if_manager))
    sdd.sdd_vtree_save(then_vtree_filename,
                       sdd.sdd_manager_vtree(then_manager))
    sdd.sdd_manager_free(if_manager)
    sdd.sdd_manager_free(then_manager)

    constraint = {}
    constraint["if_vtree"] = if_vtree_filename
    constraint["if"] = ifs
    constraint["if_variable_mapping"] = if_variable_mapping
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = thens
    constraint["then_variable_mapping"] = then_variable_mapping
    return constraint