def compile_tree(node, tree_state, sdd_state, label="0", st=None, path_sdd=None):
    """Recursively disjoin into sdd_state.alpha the SDDs of all root-to-leaf
    paths whose leaf class matches `label`."""
    if st is None:
        st = ""
    mgr = sdd_state.manager
    if path_sdd is None:
        path_sdd = sdd.sdd_manager_true(mgr)
    tree = tree_state.tree
    children = tree.successors(node)
    if len(children) == 0:
        node_label = node.attr['label']
        node_label = node_label.split(':')[-1].strip().split(' ')[0]
        if label == node_label:
            # disjoin path
            #print st
            alpha = sdd.sdd_disjoin(sdd_state.alpha, path_sdd, mgr)
            sdd.sdd_deref(sdd_state.alpha, mgr)
            sdd.sdd_ref(alpha, mgr)
            sdd_state.alpha = alpha
    else:
        for child in children:
            edge = tree.get_edge(node, child)
            var = node.attr['label'].split(' ')[-1]
            val = edge.attr['label'].split(' ')[-1]
            child_st = st + "%s:%s " % (var, val)  # extend path
            base_var = "_".join(var.split('_')[:-1]) + "_%d"
            cur_index = int(var.split('_')[-1])
            low_index, high_index = 0, tree_state.constraint_info[base_var][0]
            beta = sdd.sdd_manager_false(mgr)
            if val == ">=":
                for i in xrange(cur_index + 1, high_index):
                    sdd_lit = tree_state.domain[base_var % i]
                    beta = sdd.sdd_disjoin(
                        beta, sdd.sdd_manager_literal(sdd_lit, mgr), mgr)
            else:  # val == "<"
                for i in xrange(low_index, cur_index + 1):
                    sdd_lit = tree_state.domain[base_var % i]
                    beta = sdd.sdd_disjoin(
                        beta, sdd.sdd_manager_literal(sdd_lit, mgr), mgr)
            sdd_var = tree_state.domain[var]
            new_path_sdd = sdd.sdd_conjoin(path_sdd, beta, mgr)
            sdd_state.used_vars.add(sdd_var)
            child_st = st + "%s:%s " % (var, val)
            sdd.sdd_ref(new_path_sdd, mgr)
            compile_tree(child, tree_state, sdd_state, label=label,
                         st=child_st, path_sdd=new_path_sdd)
            sdd.sdd_deref(new_path_sdd, mgr)

def _primes_two(alpha, variables, cache1, cache2, pmgr, mgr):
    if len(variables) == 0:
        if sdd.sdd_node_is_false(alpha):
            return sdd.sdd_manager_false(pmgr)
        if sdd.sdd_node_is_true(alpha):
            return sdd.sdd_manager_true(pmgr)
    key = (len(variables), sdd.sdd_id(alpha))
    if key in cache1:
        global cache_hits
        cache_hits += 1
        if cache_hits % 1000 == 0:
            print "cache-hits-update:", cache_hits
        return cache1[key]
    var, remaining = variables[0], variables[1:]
    alpha0 = sdd.sdd_condition(-var, alpha, mgr)
    alpha1 = sdd.sdd_condition(var, alpha, mgr)
    primes0 = _primes_two(alpha0, remaining, cache1, cache2, pmgr, mgr)
    primes1 = _primes_two(alpha1, remaining, cache1, cache2, pmgr, mgr)
    qrimes0 = _keep_imp(primes0, alpha1, remaining, cache1, cache2, pmgr, mgr)
    qrimes1 = _keep_imp(primes1, alpha0, remaining, cache1, cache2, pmgr, mgr)
    gamma = sdd.sdd_disjoin(qrimes0, qrimes1, pmgr)
    gamma = sdd.sdd_conjoin(_sdd_unused(var, pmgr), gamma, pmgr)
    kappa = sdd.sdd_conjoin(primes0, sdd.sdd_negate(qrimes0, pmgr), pmgr)
    kappa = sdd.sdd_conjoin(kappa, _sdd_used_neg(var, pmgr), pmgr)
    gamma = sdd.sdd_disjoin(gamma, kappa, pmgr)
    kappa = sdd.sdd_conjoin(primes1, sdd.sdd_negate(qrimes1, pmgr), pmgr)
    kappa = sdd.sdd_conjoin(kappa, _sdd_used_pos(var, pmgr), pmgr)
    gamma = sdd.sdd_disjoin(gamma, kappa, pmgr)
    cache1[key] = gamma
    return gamma

def _primes_one_given_term(alpha, variables, inst, cache, cache_dummy, pmgr, mgr):
    if len(variables) == 0:
        if sdd.sdd_node_is_true(alpha):
            return sdd.sdd_manager_true(pmgr)
        if sdd.sdd_node_is_false(alpha):
            return sdd.sdd_manager_false(pmgr)
    #add cases for true/false
    key = (len(variables), sdd.sdd_id(alpha))
    if key in cache:
        return cache[key]
    var, remaining = variables[0], variables[1:]
    val, remaining_val = inst[0], inst[1:]
    beta2 = sdd.sdd_forall(var, alpha, mgr)
    gamma2 = _primes_one_given_term(beta2, remaining, remaining_val,
                                    cache, cache_dummy, pmgr, mgr)
    gamma9 = gamma2
    pvar = 3 * (var - 1) + 1
    kappa2 = sdd.sdd_manager_literal(-pvar, pmgr)
    gamma2 = sdd.sdd_conjoin(gamma2, kappa2, pmgr)
    if val == 0:
        beta0 = sdd.sdd_condition(-var, alpha, mgr)
        gamma0 = _primes_one_given_term(beta0, remaining, remaining_val,
                                        cache, cache_dummy, pmgr, mgr)
        gamma0 = sdd.sdd_conjoin(gamma0, sdd.sdd_negate(gamma9, pmgr), pmgr)
        kappa0 = sdd.sdd_conjoin(sdd.sdd_manager_literal(-(pvar + 1), pmgr),
                                 sdd.sdd_manager_literal((pvar + 2), pmgr), pmgr)
        kappa0 = sdd.sdd_conjoin(kappa0, sdd.sdd_manager_literal(pvar, pmgr), pmgr)
        gamma0 = sdd.sdd_conjoin(gamma0, kappa0, pmgr)
        #gamma0 = sdd.sdd_conjoin(gamma0,sdd.sdd_negate(gamma9,pmgr),pmgr)
    if val == 1:
        beta1 = sdd.sdd_condition(var, alpha, mgr)
        gamma1 = _primes_one_given_term(beta1, remaining, remaining_val,
                                        cache, cache_dummy, pmgr, mgr)
        gamma1 = sdd.sdd_conjoin(gamma1, sdd.sdd_negate(gamma9, pmgr), pmgr)
        kappa1 = sdd.sdd_conjoin(sdd.sdd_manager_literal((pvar + 1), pmgr),
                                 sdd.sdd_manager_literal(-(pvar + 2), pmgr), pmgr)
        kappa1 = sdd.sdd_conjoin(kappa1, sdd.sdd_manager_literal(pvar, pmgr), pmgr)
        gamma1 = sdd.sdd_conjoin(gamma1, kappa1, pmgr)
        #gamma1 = sdd.sdd_conjoin(gamma1,sdd.sdd_negate(gamma9,pmgr),pmgr)
    if val == 0:
        gamma = sdd.sdd_disjoin(gamma0, gamma2, pmgr)
    if val == 1:
        gamma = sdd.sdd_disjoin(gamma1, gamma2, pmgr)
    #gamma = sdd.sdd_disjoin(sdd.sdd_disjoin(gamma0, gamma1, pmgr), gamma2, pmgr)
    #if len(variables) > 60:
    #    print len(variables), sdd.sdd_manager_count(mgr)
    cache[key] = gamma
    return gamma

def test_admission():
    var_count = 4
    vtree = sdd.sdd_vtree_new(var_count, "balanced")
    mgr = sdd.sdd_manager_new(vtree)

    # WFEG
    # ( w ^ g )
    alpha = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                            sdd.sdd_manager_literal(4, mgr), mgr)
    # ( w ^ f ^ e )
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                           sdd.sdd_manager_literal(2, mgr), mgr)
    beta = sdd.sdd_conjoin(beta, sdd.sdd_manager_literal(3, mgr), mgr)
    # ( f ^ e ^ g )
    gamma = sdd.sdd_conjoin(sdd.sdd_manager_literal(2, mgr),
                            sdd.sdd_manager_literal(3, mgr), mgr)
    gamma = sdd.sdd_conjoin(gamma, sdd.sdd_manager_literal(4, mgr), mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, gamma, mgr)
    alpha = sdd.sdd_negate(alpha, mgr)

    beta, pmgr = primes(alpha, mgr)
    _sanity_check(alpha, mgr, beta, pmgr)
    vtree = sdd.sdd_manager_vtree(mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)

    import models
    for model in models.models(alpha, vtree):
        print models.str_model(model)
    for model in models.models(beta, pvtree):
        print models.str_model(model)
    for model in models.models(alpha, vtree):
        print "==", models.str_model(model)
        model_list = [model[var] for var in sorted(model.keys())]
        gamma, pmgr = compatible_primes(alpha, model_list, mgr,
                                        primes_mgr=(beta, pmgr))
        pvtree = sdd.sdd_manager_vtree(pmgr)
        for prime_model in models.models(gamma, pvtree):
            print models.str_model(prime_model)
            term = prime_to_dict(prime_model, var_count)
            print " ".join([("*" if var not in term else
                             "+" if term[var] == 1 else "-")
                            for var in xrange(1, var_count + 1)])

    print "dead-nodes:", sdd.sdd_manager_dead_count(mgr)
    print "dead-nodes:", sdd.sdd_manager_dead_count(pmgr)

def parse_bdd(filename):
    var_count, node_count = pre_parse_bdd(filename)
    print " zdd var count:", var_count
    print " zdd node count:", node_count
    manager = start_manager(var_count, range(1, var_count + 1))
    root = sdd.sdd_manager_vtree(manager)
    nodes = [None] * (node_count + 1)
    index, id2index = 1, {}
    f = open(filename)
    for line in f.readlines():
        if line.startswith("."):
            break
        line = line.strip().split()
        nid = int(line[0])
        dvar = int(line[1])
        lo, hi = line[2], line[3]
        hi_lit = sdd.sdd_manager_literal(dvar, manager)
        lo_lit = sdd.sdd_manager_literal(-dvar, manager)
        if lo == 'T':
            lo_sdd, lo_vtree = sdd.sdd_manager_true(manager), None
        elif lo == 'B':
            lo_sdd, lo_vtree = sdd.sdd_manager_false(manager), None
        else:
            lo_id = int(lo)
            lo_sdd, lo_vtree = nodes[id2index[lo_id]]
        if hi == 'T':
            hi_sdd, hi_vtree = sdd.sdd_manager_true(manager), None
        elif hi == 'B':
            hi_sdd, hi_vtree = sdd.sdd_manager_false(manager), None
        else:
            hi_id = int(hi)
            hi_sdd, hi_vtree = nodes[id2index[hi_id]]
        #v1,v2 = sdd.sdd_vtree_of(hi_lit),sdd.sdd_vtree_of(hi_sdd)
        #vt = sdd.sdd_vtree_lca(v1,v2,root)
        vt = sdd.sdd_manager_vtree_of_var(dvar, manager)
        vt = sdd.sdd_vtree_parent(vt)
        vt = sdd.sdd_vtree_right(vt)
        if dvar < var_count:
            hi_sdd = zero_normalize_sdd(hi_sdd, hi_vtree, vt, manager)
            lo_sdd = zero_normalize_sdd(lo_sdd, lo_vtree, vt, manager)
            vt = sdd.sdd_vtree_parent(vt)
        hi_sdd = sdd.sdd_conjoin(hi_lit, hi_sdd, manager)
        lo_sdd = sdd.sdd_conjoin(lo_lit, lo_sdd, manager)
        alpha = sdd.sdd_disjoin(hi_sdd, lo_sdd, manager)
        nodes[index] = (alpha, vt)
        id2index[nid] = index
        index += 1
    f.close()
    return manager, nodes[-1][0]

def GetLocalConstraintsForRoot(self, file_prefix):
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename = "%s/%s_then_sdd.sdd" % (file_prefix, self.name)
    constraint = {}
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = [then_sdd_filename]
    universe = []
    # internal edges
    for sub_region_edge_tup in self.sub_region_edges:
        universe.append(sub_region_edge_tup)
    GraphSet.set_universe(universe)
    universe = GraphSet.universe()
    paths = GraphSet()
    child_names = self.children.keys()
    for (i, j) in itertools.combinations(child_names, 2):
        paths = paths.union(GraphSet.paths(i, j))
    name_to_sdd_index = {}
    zdd_to_sdd_index = [None]  # for generating sdd from graphset
    sdd_index = 0
    for child in child_names:
        sdd_index += 1
        name_to_sdd_index["c%s" % child] = sdd_index
    for sub_region_edge in universe:
        corresponding_network_edges = self.sub_region_edges[sub_region_edge]
        coresponding_network_edges_sdd_index = []
        for single_edge in corresponding_network_edges:
            sdd_index += 1
            name_to_sdd_index[str(single_edge)] = sdd_index
            coresponding_network_edges_sdd_index.append(sdd_index)
        zdd_to_sdd_index.append(coresponding_network_edges_sdd_index)
    constraint["then_variable_mapping"] = name_to_sdd_index
    rl_vtree = sdd.sdd_vtree_new(sdd_index, "right")
    sdd_manager = sdd.sdd_manager_new(rl_vtree)
    sdd.sdd_vtree_free(rl_vtree)
    sdd.sdd_manager_auto_gc_and_minimize_off(sdd_manager)
    # Construct simple path constraint
    simple_path_constraint = generate_sdd_from_graphset(paths, sdd_manager,
                                                        zdd_to_sdd_index)
    # non empty path in this region map
    none_of_child = sdd.util.sdd_negative_term(
        sdd_manager,
        [name_to_sdd_index["c%s" % child] for child in self.children])
    case_one = sdd.sdd_conjoin(none_of_child, simple_path_constraint, sdd_manager)
    # empty path in this region map
    exactly_one_child = sdd.util.sdd_exactly_one(
        sdd_manager,
        [name_to_sdd_index["c%s" % child] for child in self.children])
    empty_path_constraint = sdd.util.sdd_negative_term(
        sdd_manager, sum(zdd_to_sdd_index[1:], []))
    case_two = sdd.sdd_conjoin(exactly_one_child, empty_path_constraint, sdd_manager)
    total_constraint = sdd.sdd_disjoin(case_one, case_two, sdd_manager)
    sdd.sdd_save(then_sdd_filename, total_constraint)
    sdd.sdd_vtree_save(then_vtree_filename, sdd.sdd_manager_vtree(sdd_manager))
    sdd.sdd_manager_free(sdd_manager)
    return constraint

def test():
    var_count = 4
    vtree = sdd.sdd_vtree_new(var_count, "balanced")
    mgr = sdd.sdd_manager_new(vtree)

    # A v B
    alpha = sdd.sdd_disjoin(sdd.sdd_manager_literal(1, mgr),
                            sdd.sdd_manager_literal(2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-3, mgr),
                           sdd.sdd_manager_literal(-4, mgr), mgr)
    # A v B v ( ~C ^ ~D )
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)

    beta, pmgr = primes(alpha, mgr)
    _sanity_check(alpha, mgr, beta, pmgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)

    import models
    #beta2 = sdd.sdd_global_minimize_cardinality(beta,pmgr)
    beta2 = beta
    for model in models.models(beta2, pvtree):
        print models.str_model(model)

    global cache_hits
    print "cache-hits:", cache_hits

    print "all-ones"
    beta, pmgr = compatible_primes(alpha, [1, 1, 1, 1], mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for model in models.models(beta, pvtree):
        print models.str_model(model)

    print "all-zeros"
    beta, pmgr = compatible_primes(alpha, [0, 0, 0, 0], mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for model in models.models(beta, pvtree):
        print models.str_model(model)

    print "blah"
    beta, pmgr = compatible_primes(alpha, [1, 0, 1, 0], mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for model in models.models(beta, pvtree):
        print models.str_model(model)

    print "dead-nodes:", sdd.sdd_manager_dead_count(mgr)
    print "dead-nodes:", sdd.sdd_manager_dead_count(pmgr)

def _remove_dummies(alpha, var_count, pmgr):
    for var in xrange(1, var_count + 1):
        var = 3 * (var - 1) + 1
        beta = sdd.sdd_manager_literal(-var, pmgr)
        gamma = sdd.sdd_disjoin(sdd.sdd_manager_literal(var + 1, pmgr),
                                sdd.sdd_manager_literal(var + 2, pmgr), pmgr)
        beta = sdd.sdd_conjoin(beta, gamma, pmgr)
        alpha = sdd.sdd_conjoin(alpha, sdd.sdd_negate(beta, pmgr), pmgr)
    return alpha

def encode_unique_constraint(values, mgr):
    alpha = sdd.sdd_manager_true(mgr)
    # at most one
    for v1 in values:
        for v2 in values:
            if v1 == v2:
                continue
            beta = sdd.sdd_disjoin(sdd.sdd_manager_literal(-1 * v1, mgr),
                                   sdd.sdd_manager_literal(-1 * v2, mgr), mgr)
            alpha = sdd.sdd_conjoin(alpha, beta, mgr)
    # at least one
    beta = sdd.sdd_manager_false(mgr)
    for v in values:
        beta = sdd.sdd_disjoin(beta, sdd.sdd_manager_literal(v, mgr), mgr)
    alpha = sdd.sdd_conjoin(alpha, beta, mgr)
    return alpha

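# The following usage sketch is not part of the original source: it assumes the
# same swig-style `sdd` bindings used throughout this file, and the helper name
# _example_encode_unique_constraint is hypothetical. It shows the exactly-one
# constraint over variables 1..4 being built and checked via its model count.
def _example_encode_unique_constraint():
    var_count = 4
    vtree = sdd.sdd_vtree_new(var_count, "balanced")
    mgr = sdd.sdd_manager_new(vtree)
    # exactly one of variables 1..4 is true
    alpha = encode_unique_constraint(range(1, var_count + 1), mgr)
    # an exactly-one constraint over 4 variables has exactly 4 models
    print "models:", sdd.sdd_global_model_count(alpha, mgr)
    sdd.sdd_manager_free(mgr)
    sdd.sdd_vtree_free(vtree)
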
def _primes_one(alpha, variables, cache, cache_dummy, pmgr, mgr):
    if len(variables) == 0:
        if sdd.sdd_node_is_true(alpha):
            return sdd.sdd_manager_true(pmgr)
        if sdd.sdd_node_is_false(alpha):
            return sdd.sdd_manager_false(pmgr)
    #add cases for true/false
    key = (len(variables), sdd.sdd_id(alpha))
    if key in cache:
        global cache_hits
        cache_hits += 1
        #if cache_hits % 1000 == 0: print "cache-hits-update:", cache_hits
        return cache[key]
    var, remaining = variables[0], variables[1:]
    beta2 = sdd.sdd_forall(var, alpha, mgr)
    gamma2 = _primes_one(beta2, remaining, cache, cache_dummy, pmgr, mgr)
    gamma9 = gamma2
    pvar = 3 * (var - 1) + 1
    kappa2 = sdd.sdd_manager_literal(-pvar, pmgr)
    gamma2 = sdd.sdd_conjoin(gamma2, kappa2, pmgr)
    beta0 = sdd.sdd_condition(-var, alpha, mgr)
    gamma0 = _primes_one(beta0, remaining, cache, cache_dummy, pmgr, mgr)
    gamma0 = sdd.sdd_conjoin(gamma0, sdd.sdd_negate(gamma9, pmgr), pmgr)
    kappa0 = sdd.sdd_conjoin(sdd.sdd_manager_literal(-(pvar + 1), pmgr),
                             sdd.sdd_manager_literal((pvar + 2), pmgr), pmgr)
    kappa0 = sdd.sdd_conjoin(kappa0, sdd.sdd_manager_literal(pvar, pmgr), pmgr)
    gamma0 = sdd.sdd_conjoin(gamma0, kappa0, pmgr)
    #gamma0 = sdd.sdd_conjoin(gamma0,sdd.sdd_negate(gamma9,pmgr),pmgr)
    beta1 = sdd.sdd_condition(var, alpha, mgr)
    gamma1 = _primes_one(beta1, remaining, cache, cache_dummy, pmgr, mgr)
    gamma1 = sdd.sdd_conjoin(gamma1, sdd.sdd_negate(gamma9, pmgr), pmgr)
    kappa1 = sdd.sdd_conjoin(sdd.sdd_manager_literal((pvar + 1), pmgr),
                             sdd.sdd_manager_literal(-(pvar + 2), pmgr), pmgr)
    kappa1 = sdd.sdd_conjoin(kappa1, sdd.sdd_manager_literal(pvar, pmgr), pmgr)
    gamma1 = sdd.sdd_conjoin(gamma1, kappa1, pmgr)
    #gamma1 = sdd.sdd_conjoin(gamma1,sdd.sdd_negate(gamma9,pmgr),pmgr)
    gamma = sdd.sdd_disjoin(gamma0, gamma1, pmgr)
    gamma = sdd.sdd_disjoin(gamma, gamma2, pmgr)
    cache[key] = gamma
    return gamma

def test_andy():
    var_count = 3
    vtree = sdd.sdd_vtree_new(var_count, "balanced")
    mgr = sdd.sdd_manager_new(vtree)

    # 100, 101, 111, 001, 011
    alpha = sdd.sdd_manager_false(mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                           sdd.sdd_manager_literal(-2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-3, mgr), beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                           sdd.sdd_manager_literal(-2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(1, mgr),
                           sdd.sdd_manager_literal(2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-1, mgr),
                           sdd.sdd_manager_literal(-2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(-1, mgr),
                           sdd.sdd_manager_literal(2, mgr), mgr)
    beta = sdd.sdd_conjoin(sdd.sdd_manager_literal(3, mgr), beta, mgr)
    alpha = sdd.sdd_disjoin(alpha, beta, mgr)

    beta, pmgr = primes(alpha, mgr)
    _sanity_check(alpha, mgr, beta, pmgr)
    vtree = sdd.sdd_manager_vtree(mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)

    import models
    for model in models.models(alpha, vtree):
        print models.str_model(model)
    for model in models.models(beta, pvtree):
        print models.str_model(model)

    print "dead-nodes:", sdd.sdd_manager_dead_count(mgr)
    print "dead-nodes:", sdd.sdd_manager_dead_count(pmgr)

def _keep_imp(beta, alpha, variables, cache1, cache2, pmgr, mgr):
    #if len(variables) == 0:
    if sdd.sdd_node_is_false(beta):
        return sdd.sdd_manager_false(pmgr)
    if sdd.sdd_node_is_false(alpha):
        return sdd.sdd_manager_false(pmgr)
    if sdd.sdd_node_is_true(alpha):
        return beta
    key = (len(variables), sdd.sdd_id(alpha), sdd.sdd_id(beta))
    if key in cache2:
        global cache_hits
        cache_hits += 1
        #if cache_hits % 1000 == 0: print "cache-hits-update:", cache_hits
        return cache2[key]
    var, remaining = variables[0], variables[1:]
    pvar = 3 * (var - 1) + 1
    alpha0 = sdd.sdd_condition(-var, alpha, mgr)
    alpha1 = sdd.sdd_condition(var, alpha, mgr)
    beta0 = sdd.sdd_condition(pvar, beta, pmgr)
    beta0 = sdd.sdd_condition(-(pvar + 1), beta0, pmgr)
    #beta0 = sdd.sdd_condition( (pvar+2),beta0,pmgr)
    beta1 = sdd.sdd_condition(pvar, beta, pmgr)
    beta1 = sdd.sdd_condition((pvar + 1), beta1, pmgr)
    #beta1 = sdd.sdd_condition(-(pvar+2),beta1,pmgr)
    betad = sdd.sdd_condition(-pvar, beta, pmgr)
    P = _keep_imp(betad, alpha0, remaining, cache1, cache2, pmgr, mgr)
    Q = _keep_imp(betad, alpha1, remaining, cache1, cache2, pmgr, mgr)
    R0 = _keep_imp(beta0, alpha0, remaining, cache1, cache2, pmgr, mgr)
    R1 = _keep_imp(beta1, alpha1, remaining, cache1, cache2, pmgr, mgr)
    gamma = sdd.sdd_conjoin(P, Q, pmgr)
    gamma = sdd.sdd_conjoin(_sdd_unused(var, pmgr), gamma, pmgr)
    kappa = sdd.sdd_conjoin(_sdd_used_neg(var, pmgr), R0, pmgr)
    gamma = sdd.sdd_disjoin(gamma, kappa, pmgr)
    kappa = sdd.sdd_conjoin(_sdd_used_pos(var, pmgr), R1, pmgr)
    gamma = sdd.sdd_disjoin(gamma, kappa, pmgr)
    cache2[key] = gamma
    return gamma

def generate_exactly_two_from_tuples(sdd_manager, tuples, variables):
    result_constraint = sdd.sdd_manager_false(sdd_manager)
    for cur_tup in tuples:
        cur_term = sdd.sdd_manager_true(sdd_manager)
        for cur_var in variables:
            if cur_var in cur_tup:
                cur_term = sdd.sdd_conjoin(
                    cur_term, sdd.sdd_manager_literal(cur_var, sdd_manager),
                    sdd_manager)
            else:
                cur_term = sdd.sdd_conjoin(
                    cur_term, sdd.sdd_manager_literal(-cur_var, sdd_manager),
                    sdd_manager)
        result_constraint = sdd.sdd_disjoin(cur_term, result_constraint,
                                            sdd_manager)
    return result_constraint

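# A small hedged example, not from the original code; the helper name
# _example_generate_exactly_two is hypothetical. It builds the constraint over
# variables 1..3 whose only allowed positive pairs are (1,2) and (2,3), then
# checks the model count with the sdd bindings already used in this file.
def _example_generate_exactly_two():
    variables = [1, 2, 3]
    vtree = sdd.sdd_vtree_new(len(variables), "balanced")
    manager = sdd.sdd_manager_new(vtree)
    alpha = generate_exactly_two_from_tuples(manager, [(1, 2), (2, 3)], variables)
    # the two allowed assignments are 110 and 011, so expect 2 models
    print "models:", sdd.sdd_global_model_count(alpha, manager)
    sdd.sdd_manager_free(manager)
    sdd.sdd_vtree_free(vtree)
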
def _sanity_check(f, mgr, g, pmgr):
    """f is original function and g is its prime implicants"""
    alpha = sdd.sdd_manager_false(mgr)
    pvtree = sdd.sdd_manager_vtree(pmgr)
    for prime in models.models(g, pvtree):
        term = prime_to_term(prime, mgr)
        beta = sdd.sdd_conjoin(term, f, mgr)
        assert term == beta
        assert _is_prime(prime, f, mgr)
        alpha = sdd.sdd_disjoin(alpha, term, mgr)
    mc1 = sdd.sdd_global_model_count(f, mgr)
    mc2 = sdd.sdd_global_model_count(alpha, mgr)
    print "mc-check:", mc1, mc2, ("ok" if mc1 == mc2 else "NOT OK")
    assert mc1 == mc2
    assert alpha == f

def compatible_primes(alpha, inst, mgr, primes_mgr=None):
    if primes_mgr is None:
        beta, pmgr = primes(alpha, mgr)
    else:
        beta, pmgr = primes_mgr
    asdf = beta
    for i, val in enumerate(inst):
        var = i + 1
        pvar = 3 * (var - 1) + 1
        lit = (pvar + 1) if val == 1 else -(pvar + 1)
        gamma = sdd.sdd_conjoin(sdd.sdd_manager_literal(pvar, pmgr),
                                sdd.sdd_manager_literal(lit, pmgr), pmgr)
        gamma = sdd.sdd_disjoin(gamma, sdd.sdd_manager_literal(-pvar, pmgr), pmgr)
        beta = sdd.sdd_conjoin(beta, gamma, pmgr)
    return beta, pmgr

def convert_helper(node, mgr, obdd, dp, depth):
    """Recursively convert an OBDD node to an SDD by Shannon expansion on the
    node's decision variable, memoizing results in dp."""
    if node == 'S1':
        return sdd.sdd_manager_true(mgr)
    if node == 'S0':
        return sdd.sdd_manager_false(mgr)
    if node in dp:
        return dp[node]
    var, ch0, ch1 = obdd[node][0] + 1, obdd[node][1], obdd[node][2]
    #print var
    alpha = sdd.sdd_conjoin(convert_helper(ch0, mgr, obdd, dp, depth + 1),
                            sdd.sdd_manager_literal(-1 * var, mgr), mgr)
    beta = sdd.sdd_conjoin(convert_helper(ch1, mgr, obdd, dp, depth + 1),
                           sdd.sdd_manager_literal(var, mgr), mgr)
    dp[node] = sdd.sdd_disjoin(alpha, beta, mgr)
    return dp[node]

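# Hedged usage sketch (an assumption, not from the original source): the obdd
# dict is assumed to map a node id to (0-based variable index, low child,
# high child), with 'S0'/'S1' as the terminal sinks, which is how
# convert_helper reads it above. The helper name and node ids are illustrative.
def _example_convert_helper():
    # OBDD for (x1 and x2): node 'a' tests variable 0 (sdd var 1),
    # low -> false sink 'S0', high -> node 'b'; node 'b' tests variable 1.
    obdd = {'a': (0, 'S0', 'b'), 'b': (1, 'S0', 'S1')}
    vtree = sdd.sdd_vtree_new(2, "balanced")
    mgr = sdd.sdd_manager_new(vtree)
    alpha = convert_helper('a', mgr, obdd, {}, 0)
    print "models:", sdd.sdd_global_model_count(alpha, mgr)  # expect 1
    sdd.sdd_manager_free(mgr)
    sdd.sdd_vtree_free(vtree)
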
def sdd_exactly_one_among(manager, active_variables, background_variables):
    if not all(x in background_variables for x in active_variables):
        raise Exception(
            "Invalid argument active variables %s, background_variables %s "
            % (active_variables, background_variables))
    result = sdd.sdd_manager_false(manager)
    for positive_variable in active_variables:
        cur_term = sdd.sdd_manager_true(manager)
        for variable in background_variables:
            if variable != positive_variable:
                cur_lit = sdd.sdd_manager_literal(-variable, manager)
            else:
                cur_lit = sdd.sdd_manager_literal(variable, manager)
            cur_term = sdd.sdd_conjoin(cur_term, cur_lit, manager)
        sdd.sdd_save("t1.sdd", result)
        sdd.sdd_save("t2.sdd", cur_term)
        sdd.sdd_vtree_save("manager.vtree", sdd.sdd_manager_vtree(manager))
        result = sdd.sdd_disjoin(result, cur_term, manager)
    return result

def disjoin2(self, a, b):
    assert a is not None
    assert b is not None
    return sdd.sdd_disjoin(a, b, self.__manager)

def GetLocalConstraintsForInternalClusters(self, file_prefix):
    if_vtree_filename = "%s/%s_if_vtree.vtree" % (file_prefix, self.name)
    if_sdd_filename_prefix = "%s/%s_if_sdd" % (file_prefix, self.name)
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename_prefix = "%s/%s_then_sdd" % (file_prefix, self.name)
    ifs = []
    thens = []
    if_variable_mapping = {}
    if_sdd_index = 0
    if_sdd_index += 1
    if_variable_mapping["c%s" % self.name] = if_sdd_index  # cluster indicator for current cluster
    for external_edge in self.external_edges:
        if_sdd_index += 1
        if_variable_mapping[str(external_edge)] = if_sdd_index
    then_variable_mapping = {}  # variables for the child clusters
    then_sdd_index = 0
    zdd_to_sdd_index = [None]
    for child in self.children:
        then_sdd_index += 1
        then_variable_mapping["c%s" % child] = then_sdd_index
    universe = self.sub_region_edges.keys()
    GraphSet.set_universe(universe)
    universe = GraphSet.universe()
    for node_pair in universe:
        correponding_sdd_indexes = []
        for internal_edge in self.sub_region_edges[node_pair]:
            then_sdd_index += 1
            then_variable_mapping[str(internal_edge)] = then_sdd_index
            correponding_sdd_indexes.append(then_sdd_index)
        zdd_to_sdd_index.append(correponding_sdd_indexes)
    if_vtree, then_vtree = (sdd.sdd_vtree_new(if_sdd_index, "right"),
                            sdd.sdd_vtree_new(then_sdd_index, "right"))
    if_manager, then_manager = (sdd.sdd_manager_new(if_vtree),
                                sdd.sdd_manager_new(then_vtree))
    sdd.sdd_manager_auto_gc_and_minimize_off(if_manager)
    sdd.sdd_manager_auto_gc_and_minimize_off(then_manager)
    sdd.sdd_vtree_free(if_vtree)
    sdd.sdd_vtree_free(then_vtree)
    # none of the external edges are used and cluster indicator is off
    case_index = 0
    case_one_if = sdd.util.sdd_negative_term(if_manager, range(1, if_sdd_index + 1))
    case_one_then = sdd.util.sdd_negative_term(then_manager, range(1, then_sdd_index + 1))
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_one_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index), case_one_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    # none of the external edges are used and cluster indicator is on
    case_index += 1
    case_two_if = sdd.util.sdd_exactly_one_among(
        if_manager, [if_variable_mapping["c%s" % self.name]],
        range(1, if_sdd_index + 1))
    # *** Non empty path in this region map
    none_of_child = sdd.util.sdd_negative_term(
        then_manager,
        [then_variable_mapping["c%s" % child] for child in self.children])
    paths = GraphSet()
    child_names = self.children.keys()
    for c1, c2 in itertools.combinations(child_names, 2):
        paths = paths.union(GraphSet.paths(c1, c2))
    simple_path_constraint = generate_sdd_from_graphset(paths, then_manager,
                                                        zdd_to_sdd_index)
    case_one = sdd.sdd_conjoin(simple_path_constraint, none_of_child, then_manager)
    # *** Empty path in the region map
    exactly_one_chlid = sdd.util.sdd_exactly_one(
        then_manager,
        [then_variable_mapping["c%s" % child] for child in self.children])
    empty_path_constraint = sdd.util.sdd_negative_term(
        then_manager, sum(zdd_to_sdd_index[1:], []))
    case_two = sdd.sdd_conjoin(empty_path_constraint, exactly_one_chlid, then_manager)
    case_two_then = sdd.sdd_disjoin(case_one, case_two, then_manager)
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_two_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index), case_two_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    # Exactly one of the external edges is used and cluster_indicator is off
    aggregated_cases = {}
    for external_edge in self.external_edges:
        aggregated_cases.setdefault(self.external_edges[external_edge],
                                    []).append(external_edge)
    for entering_node in aggregated_cases:
        case_index += 1
        cur_case_if = sdd.util.sdd_exactly_one_among(
            if_manager,
            [if_variable_mapping[str(e)] for e in aggregated_cases[entering_node]],
            range(1, if_sdd_index + 1))
        paths = GraphSet()
        for child in self.children:
            if child == entering_node:
                continue
            paths = paths.union(GraphSet.paths(entering_node, child))
        cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                   zdd_to_sdd_index)
        cur_case_then = sdd.sdd_disjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(
                then_manager,
                [then_variable_mapping[str(e)] for e in self.internal_edges]),
            then_manager)
        # conjoin that all the child indicators are off
        cur_case_then = sdd.sdd_conjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(
                then_manager,
                [then_variable_mapping["c%s" % child] for child in self.children]),
            then_manager)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index), cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    # Exactly two of the external edges are used and cluster_indicator is off
    aggregated_cases = {}
    for (i, j) in itertools.combinations(self.external_edges.keys(), 2):
        entering_points = (self.external_edges[i], self.external_edges[j])
        entering_points = (max(entering_points), min(entering_points))
        aggregated_cases.setdefault(entering_points, []).append((i, j))
    for entering_points in aggregated_cases:
        case_index += 1
        entering_edges = aggregated_cases[entering_points]
        cur_case_if = generate_exactly_two_from_tuples(
            if_manager,
            [(if_variable_mapping[str(e1)], if_variable_mapping[str(e2)])
             for (e1, e2) in entering_edges],
            range(1, if_sdd_index + 1))
        if entering_points[0] == entering_points[1]:
            cur_case_then = sdd.util.sdd_negative_term(
                then_manager, range(1, then_sdd_index + 1))
        else:
            paths = GraphSet.paths(entering_points[0], entering_points[1])
            cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                       zdd_to_sdd_index)
        cur_case_then = sdd.sdd_conjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(
                then_manager,
                [then_variable_mapping["c%s" % child] for child in self.children]),
            then_manager)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index), cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    sdd.sdd_vtree_save(if_vtree_filename, sdd.sdd_manager_vtree(if_manager))
    sdd.sdd_vtree_save(then_vtree_filename, sdd.sdd_manager_vtree(then_manager))
    sdd.sdd_manager_free(if_manager)
    sdd.sdd_manager_free(then_manager)
    constraint = {}
    constraint["if_vtree"] = if_vtree_filename
    constraint["if"] = ifs
    constraint["if_variable_mapping"] = if_variable_mapping
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = thens
    constraint["then_variable_mapping"] = then_variable_mapping
    return constraint

def GetLocalConstraintsForLeaveClusters(self, file_prefix):
    if_vtree_filename = "%s/%s_if_vtree.vtree" % (file_prefix, self.name)
    if_sdd_filename_prefix = "%s/%s_if_sdd" % (file_prefix, self.name)
    then_vtree_filename = "%s/%s_then_vtree.vtree" % (file_prefix, self.name)
    then_sdd_filename_prefix = "%s/%s_then_sdd" % (file_prefix, self.name)
    ifs = []
    thens = []
    if_variable_mapping = {}
    if_sdd_index = 0
    if_sdd_index += 1
    if_variable_mapping["c%s" % self.name] = if_sdd_index  # cluster indicator for current cluster
    for external_edge in self.external_edges:
        if_sdd_index += 1
        if_variable_mapping[str(external_edge)] = if_sdd_index
    then_variable_mapping = {}
    zdd_to_sdd_index = [None]
    universe = []
    node_pair_to_edges = {}
    for internal_edge in self.internal_edges:
        if (internal_edge.x, internal_edge.y) not in node_pair_to_edges:
            universe.append((internal_edge.x, internal_edge.y))
        node_pair_to_edges.setdefault((internal_edge.x, internal_edge.y),
                                      []).append(internal_edge)
    GraphSet.set_universe(universe)
    universe = GraphSet.universe()
    then_sdd_index = 0
    for node_pair in universe:
        correponding_sdd_indexes = []
        for internal_edge in node_pair_to_edges[node_pair]:
            then_sdd_index += 1
            then_variable_mapping[str(internal_edge)] = then_sdd_index
            correponding_sdd_indexes.append(then_sdd_index)
        zdd_to_sdd_index.append(correponding_sdd_indexes)
    if_vtree, then_vtree = (sdd.sdd_vtree_new(if_sdd_index, "right"),
                            sdd.sdd_vtree_new(then_sdd_index, "right"))
    if_manager, then_manager = (sdd.sdd_manager_new(if_vtree),
                                sdd.sdd_manager_new(then_vtree))
    sdd.sdd_manager_auto_gc_and_minimize_off(if_manager)
    sdd.sdd_manager_auto_gc_and_minimize_off(then_manager)
    sdd.sdd_vtree_free(if_vtree)
    sdd.sdd_vtree_free(then_vtree)
    # none of the external edges are used and cluster indicator is off
    case_index = 0
    case_one_if = sdd.util.sdd_negative_term(if_manager, range(1, if_sdd_index + 1))
    case_one_then = sdd.util.sdd_negative_term(then_manager, range(1, then_sdd_index + 1))
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_one_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index), case_one_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    # none of the external edges are used and cluster indicator is on
    case_index += 1
    case_two_if = sdd.util.sdd_exactly_one_among(
        if_manager, [if_variable_mapping["c%s" % self.name]],
        range(1, if_sdd_index + 1))
    paths = GraphSet()
    for (i, j) in itertools.combinations(self.nodes, 2):
        paths = paths.union(GraphSet.paths(i, j))
    case_two_then = generate_sdd_from_graphset(paths, then_manager, zdd_to_sdd_index)
    sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), case_two_if)
    sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index), case_two_then)
    ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
    thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    # exactly one of the external edges is used and cluster indicator is off
    aggregated_cases = {}
    for external_edge in self.external_edges:
        aggregated_cases.setdefault(self.external_edges[external_edge],
                                    []).append(external_edge)
    for entering_node in aggregated_cases:
        case_index += 1
        cur_case_if = sdd.util.sdd_exactly_one_among(
            if_manager,
            [if_variable_mapping[str(e)] for e in aggregated_cases[entering_node]],
            range(1, if_sdd_index + 1))
        paths = GraphSet()
        for node in self.nodes:
            if node == entering_node:
                continue
            paths = paths.union(GraphSet.paths(entering_node, node))
        cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                   zdd_to_sdd_index)
        # disjoin the empty path
        cur_case_then = sdd.sdd_disjoin(
            cur_case_then,
            sdd.util.sdd_negative_term(then_manager, range(1, then_sdd_index + 1)),
            then_manager)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index), cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    # exactly two of the external edges are used and cluster_indicator is off
    aggregated_cases = {}
    for (i, j) in itertools.combinations(self.external_edges.keys(), 2):
        entering_points = (self.external_edges[i], self.external_edges[j])
        entering_points = (max(entering_points), min(entering_points))
        aggregated_cases.setdefault(entering_points, []).append((i, j))
    for entering_points in aggregated_cases:
        case_index += 1
        entering_edges = aggregated_cases[entering_points]
        cur_case_if = generate_exactly_two_from_tuples(
            if_manager,
            [(if_variable_mapping[str(e1)], if_variable_mapping[str(e2)])
             for (e1, e2) in entering_edges],
            range(1, if_sdd_index + 1))
        if entering_points[0] == entering_points[1]:
            cur_case_then = sdd.util.sdd_negative_term(
                then_manager, range(1, then_sdd_index + 1))
        else:
            paths = GraphSet.paths(entering_points[0], entering_points[1])
            cur_case_then = generate_sdd_from_graphset(paths, then_manager,
                                                       zdd_to_sdd_index)
        sdd.sdd_save("%s_%s" % (if_sdd_filename_prefix, case_index), cur_case_if)
        sdd.sdd_save("%s_%s" % (then_sdd_filename_prefix, case_index), cur_case_then)
        ifs.append("%s_%s" % (if_sdd_filename_prefix, case_index))
        thens.append("%s_%s" % (then_sdd_filename_prefix, case_index))
    sdd.sdd_vtree_save(if_vtree_filename, sdd.sdd_manager_vtree(if_manager))
    sdd.sdd_vtree_save(then_vtree_filename, sdd.sdd_manager_vtree(then_manager))
    sdd.sdd_manager_free(if_manager)
    sdd.sdd_manager_free(then_manager)
    constraint = {}
    constraint["if_vtree"] = if_vtree_filename
    constraint["if"] = ifs
    constraint["if_variable_mapping"] = if_variable_mapping
    constraint["then_vtree"] = then_vtree_filename
    constraint["then"] = thens
    constraint["then_variable_mapping"] = then_variable_mapping
    return constraint

def _encode_grid_aux(source, sink, nodes, graph, manager,
                     base=None, cache=None, verbose=False):
    nodes = sorted(nodes)
    key = (source, tuple(nodes))
    if cache and key in cache:
        return cache[key]
    if True:  # INITIALIZATION FOR (S,T) PATHS
        if sink not in nodes:  # unreachable
            return sdd.sdd_manager_false(manager)
        if len(nodes) == 1:  # must be sink
            return sdd.sdd_manager_true(manager)
        if not g.reachable(source, sink, nodes):
            alpha = sdd.sdd_manager_false(manager)
            cache[key] = alpha
            return alpha
        if source == sink:
            # turn off all other edges
            alpha = sdd.sdd_manager_true(manager)
            sdd.sdd_ref(alpha, manager)
            my_nodes = list(nodes)
            my_nodes.remove(source)
            for node in my_nodes:  # for all unused nodes
                edges = graph.incident_edges(node, nodes=nodes)
                sdd_vars = [graph.edge_to_index[edge] + 1 for edge in edges]
                all_false = all_false_term(sdd_vars, manager)
                alpha, tmp = sdd.sdd_conjoin(alpha, all_false, manager), alpha
                sdd.sdd_ref(alpha, manager); sdd.sdd_deref(tmp, manager)
            cache[key] = alpha
            return alpha
        alpha = sdd.sdd_manager_false(manager)
        sdd.sdd_ref(alpha, manager)
    else:  # INITIALIZATION FOR ALL PATHS STARTING FROM S
        # empty graph, source should equal sink
        if len(nodes) == 1:
            return sdd.sdd_manager_true(manager)
        # initial case: no more paths
        alpha = sdd.sdd_manager_true(manager)
        sdd.sdd_ref(alpha, manager)
        my_nodes = list(nodes)
        my_nodes.remove(source)
        for node in my_nodes:  # for all unused nodes
            edges = graph.incident_edges(node, nodes=nodes)
            sdd_vars = [graph.edge_to_index[edge] + 1 for edge in edges]
            all_false = all_false_term(sdd_vars, manager)
            alpha, tmp = sdd.sdd_conjoin(alpha, all_false, manager), alpha
            sdd.sdd_ref(alpha, manager); sdd.sdd_deref(tmp, manager)
    # after this, try to extend the paths
    # first, find incident edges
    edges = graph.incident_edges(source, nodes=nodes)
    sdd_vars = [graph.edge_to_index[edge] + 1 for edge in edges]
    all_false = all_false_term(sdd_vars, manager)
    sdd.sdd_ref(all_false, manager)
    # for each incident edge
    my_nodes = list(nodes)
    my_nodes.remove(source)
    for edge, sdd_var in zip(edges, sdd_vars):
        # recurse
        neighbor = Graph.neighbor(source, edge)
        gamma = _encode_grid_aux(neighbor, sink, my_nodes, graph, manager,
                                 base=base, cache=cache, verbose=verbose)
        if sdd.sdd_node_is_false(gamma):
            continue
        # exactly one edge on
        sdd_lit = sdd.sdd_manager_literal(sdd_var, manager)
        beta = sdd.sdd_exists(sdd_var, all_false, manager)
        beta = sdd.sdd_conjoin(beta, sdd_lit, manager)
        beta = sdd.sdd_conjoin(beta, gamma, manager)
        # accumulate
        alpha, tmp = sdd.sdd_disjoin(alpha, beta, manager), alpha
        sdd.sdd_ref(alpha, manager); sdd.sdd_deref(tmp, manager)
    sdd.sdd_deref(all_false, manager)
    cache[key] = alpha
    return alpha

def generate_sdd_from_graphset(paths, sdd_manager, zdd_edge_to_sdd_edges):
    try:
        zdd_file = tempfile.TemporaryFile()
        paths.dump(zdd_file)
        zdd_file.seek(0)
        zdd_content = zdd_file.readlines()
    finally:
        zdd_file.close()
    # handle the trivial logic
    if zdd_content[0].strip() == "T":
        result_sdd = sdd.sdd_manager_true(sdd_manager)
        for sdd_edges in zdd_edge_to_sdd_edges:
            cur_neg_term = sdd.util.sdd_negative_term(sdd_manager, sdd_edges)
            result_sdd = sdd.sdd_conjoin(result_sdd, cur_neg_term, sdd_manager)
        return result_sdd
    if zdd_content[0].strip() == "B":
        result_sdd = sdd.sdd_manager_false(sdd_manager)
        return result_sdd
    pos_zdd_indicator_to_sdd = [None]
    neg_zdd_indicator_to_sdd = [None]
    for sdd_edges in zdd_edge_to_sdd_edges:
        if sdd_edges:
            pos_zdd_indicator_to_sdd.append(
                sdd.util.sdd_exactly_one(sdd_manager, sdd_edges))
            neg_zdd_indicator_to_sdd.append(
                sdd.util.sdd_negative_term(sdd_manager, sdd_edges))
    conversion_map = {}  # key is the node index and the value is a sdd node
    decision_variable_map = {}  # key is the node index and the value is the variable index
    last_node_index = None
    zdd_variable_size = len(zdd_edge_to_sdd_edges) - 1

    def complete_zdd_child(variable_index, child, conversion_map,
                           decision_variable_map, zdd_variable_size, sdd_manager):
        if child == "T":
            if variable_index != zdd_variable_size:
                skipped_variables = range(variable_index + 1, zdd_variable_size + 1)
                neg_terms = sdd.util.sdd_negative_term(
                    sdd_manager,
                    sum([zdd_edge_to_sdd_edges[x] for x in skipped_variables], []))
                return neg_terms
            else:
                return sdd.sdd_manager_true(sdd_manager)
        elif child == "B":
            return sdd.sdd_manager_false(sdd_manager)
        else:
            child = int(child)
            child_variable = decision_variable_map[child]
            if child_variable == variable_index + 1:
                return conversion_map[child]
            else:
                skipped_variables = range(variable_index + 1, child_variable)
                neg_terms = sdd.util.sdd_negative_term(
                    sdd_manager,
                    sum([zdd_edge_to_sdd_edges[x] for x in skipped_variables], []))
                return sdd.sdd_conjoin(neg_terms, conversion_map[child], sdd_manager)

    for line in zdd_content:
        line = line.strip()
        if line == ".":
            break
        line_toks = line.split(" ")
        node_index = int(line_toks[0])
        variable_index = int(line_toks[1])
        low_child = line_toks[2]
        high_child = line_toks[3]
        sdd_low_child = None
        sdd_high_child = None
        sdd_low_child = complete_zdd_child(variable_index, low_child,
                                           conversion_map, decision_variable_map,
                                           zdd_variable_size, sdd_manager)
        sdd_high_child = complete_zdd_child(variable_index, high_child,
                                            conversion_map, decision_variable_map,
                                            zdd_variable_size, sdd_manager)
        cur_node_positive_element = sdd.sdd_conjoin(
            pos_zdd_indicator_to_sdd[variable_index], sdd_high_child, sdd_manager)
        cur_node_negative_element = sdd.sdd_conjoin(
            neg_zdd_indicator_to_sdd[variable_index], sdd_low_child, sdd_manager)
        conversion_map[node_index] = sdd.sdd_disjoin(
            cur_node_negative_element, cur_node_positive_element, sdd_manager)
        decision_variable_map[node_index] = variable_index
        last_node_index = node_index
    result = conversion_map[last_node_index]
    if decision_variable_map[last_node_index] != 1:
        skipped_variables = range(1, decision_variable_map[last_node_index])
        neg_terms = sdd.util.sdd_negative_term(
            sdd_manager,
            sum([zdd_edge_to_sdd_edges[x] for x in skipped_variables], []))
        result = sdd.sdd_conjoin(neg_terms, conversion_map[last_node_index], sdd_manager)
    return result

def str_model(model, var_count=None):
    """Convert model to string."""
    if var_count is None:
        var_count = len(model)
    return " ".join(str(model[var]) for var in xrange(1, var_count + 1))


if __name__ == '__main__':
    var_count = 10
    vtree = sdd.sdd_vtree_new(var_count, "balanced")
    manager = sdd.sdd_manager_new(vtree)
    alpha = sdd.sdd_manager_false(manager)
    for var in xrange(1, var_count + 1):
        lit = sdd.sdd_manager_literal(-var, manager)
        alpha = sdd.sdd_disjoin(alpha, lit, manager)
    vt = sdd.sdd_manager_vtree(manager)
    model_count = 0
    for model in models(alpha, vt):
        model_count += 1
        print str_model(model, var_count=var_count)
    #lib_mc = sdd.sdd_model_count(alpha,manager)
    print "model count: %d" % model_count
    sdd.sdd_manager_free(manager)
    sdd.sdd_vtree_free(vtree)

def parse_bdd(filename, var_count=None):
    if var_count is None:
        var_count, node_count = pre_parse_bdd(filename)
    else:
        max_count, node_count = pre_parse_bdd(filename)
    #print " zdd var count:", var_count
    #print " zdd node count:", node_count
    manager = start_manager(var_count, range(1, var_count + 1))
    root = sdd.sdd_manager_vtree(manager)
    nodes = [None] * (node_count + 1)
    index, id2index = 1, {}
    f = open(filename)
    for line in f.readlines():
        if line.startswith("."):
            break
        line = line.strip().split()
        nid = int(line[0])
        dvar = int(line[1])
        lo, hi = line[2], line[3]
        hi_lit = sdd.sdd_manager_literal(dvar, manager)
        lo_lit = sdd.sdd_manager_literal(-dvar, manager)
        if lo == 'T':
            lo_sdd, lo_vtree = sdd.sdd_manager_true(manager), None
        elif lo == 'B':
            lo_sdd, lo_vtree = sdd.sdd_manager_false(manager), None
        else:
            lo_id = int(lo)
            lo_sdd, lo_vtree = nodes[id2index[lo_id]]
        if hi == 'T':
            hi_sdd, hi_vtree = sdd.sdd_manager_true(manager), None
        elif hi == 'B':
            hi_sdd, hi_vtree = sdd.sdd_manager_false(manager), None
        else:
            hi_id = int(hi)
            hi_sdd, hi_vtree = nodes[id2index[hi_id]]
        #v1,v2 = sdd.sdd_vtree_of(hi_lit),sdd.sdd_vtree_of(hi_sdd)
        #vt = sdd.sdd_vtree_lca(v1,v2,root)
        vt = sdd.sdd_manager_vtree_of_var(dvar, manager)
        vt = sdd.sdd_vtree_parent(vt)
        vt = sdd.sdd_vtree_right(vt)
        if dvar < var_count:
            hi_sdd = zero_normalize_sdd(hi_sdd, hi_vtree, vt, manager)
            lo_sdd = zero_normalize_sdd(lo_sdd, lo_vtree, vt, manager)
            vt = sdd.sdd_vtree_parent(vt)
        hi_sdd = sdd.sdd_conjoin(hi_lit, hi_sdd, manager)
        lo_sdd = sdd.sdd_conjoin(lo_lit, lo_sdd, manager)
        alpha = sdd.sdd_disjoin(hi_sdd, lo_sdd, manager)
        nodes[index] = (alpha, vt)
        id2index[nid] = index
        index += 1
    f.close()
    last_sdd, last_vtree = nodes[-1]
    vt = sdd.sdd_manager_vtree(manager)
    if vt != last_vtree:
        last_sdd = zero_normalize_sdd(last_sdd, last_vtree, vt, manager)
    return manager, last_sdd

def disjoin2(self, a, b):
    return sdd.sdd_disjoin(a, b, self.__manager)

def compile_all(forest_sdds, used_vars_list, num_trees, domain, manager,
                constraint_sdd=None):
    half = int(math.ceil(num_trees / 2.0))
    true_sdd = sdd.sdd_manager_true(manager)
    false_sdd = sdd.sdd_manager_false(manager)
    last_size = 2**16
    if not constraint_sdd:
        constraint_sdd = sdd.sdd_manager_true(manager)
    true_sdd = constraint_sdd
    sdd.sdd_ref(true_sdd, manager)
    to_compile_sdds = [tree_sdd for tree_sdd in forest_sdds]
    used_vars_list = [used_vars for used_vars in used_vars_list]
    '''
    if OPTIONS.majority_circuit_opt:
        majority_sdds = [sdd.sdd_manager_literal(domain["Tree_%d" % i], manager)
                         for i in xrange(num_trees)]
        for single_sdd in majority_sdds:
            sdd.sdd_ref(single_sdd, manager)
        to_compile_sdds = majority_sdds
        used_vars_list = [set() for _ in forest_sdds]
    '''
    # cur[i] is the SDD for "at least i of the trees compiled so far are true"
    # (conjoined with the constraint); after all trees, cur[half] is the majority.
    cur = [true_sdd, false_sdd]
    used_vars = set()
    for k in xrange(num_trees):
        last, cur = cur, []
        tree_index = pick_next_tree(used_vars_list, used_vars)
        tree_sdd = to_compile_sdds[tree_index]
        used_vars |= used_vars_list[tree_index]
        to_compile_sdds = to_compile_sdds[:tree_index] + to_compile_sdds[tree_index + 1:]
        used_vars_list = used_vars_list[:tree_index] + used_vars_list[tree_index + 1:]
        for i in xrange(min(half, k + 1) + 1):
            cur_sdd = last[i]
            #cur_sdd = sdd.sdd_conjoin(sdd.sdd_negate(tree_sdd,manager),cur_sdd,manager)
            """
            elif i+(num_trees-k) < half: # don't bother
                cur_sdd = sdd.sdd_manager_false(manager)
            """
            if i == 0:
                pass
            elif i > 0:
                alpha = sdd.sdd_conjoin(tree_sdd, last[i - 1], manager)
                sdd.sdd_deref(last[i - 1], manager)
                cur_sdd = sdd.sdd_disjoin(cur_sdd, alpha, manager)
            sdd.sdd_ref(cur_sdd, manager)
            cur.append(cur_sdd)
            if sdd.sdd_manager_dead_count(manager) >= 2 * last_size:
                sdd.sdd_manager_garbage_collect(manager)
            if sdd.sdd_manager_live_count(manager) >= 2 * last_size:
                print "*",
                sdd.sdd_manager_minimize_limited(manager)
                last_size = 2 * last_size
        if k >= half:
            sdd.sdd_deref(last[-2], manager)
        sdd.sdd_deref(tree_sdd, manager)
        cur.append(false_sdd)
        print "%d" % (num_trees - k),
        sys.stdout.flush()
        #print "%d/%d" % (k,num_trees)
    print "live size:", sdd.sdd_manager_live_count(manager)
    #print "dead size:", sdd.sdd_manager_dead_count(manager)
    sdd.sdd_manager_garbage_collect(manager)
    #sdd.sdd_manager_minimize_limited(manager)
    #for alpha in cur: sdd.sdd_deref(alpha,manager)
    ret = cur[-2]
    '''
    if OPTIONS.majority_circuit_opt:
        # save ret (the majority circuit)
        # save each individual tree_sdd
        vtree = sdd.sdd_manager_vtree(manager)
        majority_sdd_filename = "%s_majority.sdd" % sdd_basename
        majority_vtree_filename = "%s_majority.vtree" % sdd_basename
        print "Writing majority sdd file %s and majority vtree file %s" % (
            majority_sdd_filename, majority_vtree_filename)
        sdd.sdd_save(majority_sdd_filename, ret)
        sdd.sdd_vtree_save(majority_vtree_filename, vtree)
        print "Writing individual tree sdds..."
        for k, tree_sdd in enumerate(forest_sdds):
            tree_name = "tree_%d" % k
            tree_sdd_filename = "%s_majority_%s.sdd" % (sdd_basename, tree_name)
            sdd.sdd_save(tree_sdd_filename, tree_sdd)
        gamma = sdd.sdd_manager_true(manager)
        for k, tree_sdd in enumerate(forest_sdds):
            new_gamma = sdd.sdd_conjoin(gamma, tree_sdd, manager)
            sdd.sdd_ref(new_gamma, manager)
            sdd.sdd_deref(gamma, manager)
            gamma = new_gamma
            if sdd.sdd_manager_dead_count(manager) >= 2*last_size:
                sdd.sdd_manager_garbage_collect(manager)
            if sdd.sdd_manager_live_count(manager) >= 2*last_size:
                print "*",
                sdd.sdd_manager_minimize_limited(manager)
                last_size = 2*last_size
            print "%d" % k,
            sys.stdout.flush()
        print "live size:", sdd.sdd_manager_live_count(manager)
        ret = sdd.sdd_conjoin(ret, gamma, manager)
        # existential quantification
        print "Existential quantification..."
        exists_map = sdd.new_intArray(len(domain))
        for i in xrange(len(domain)):
            sdd.intArray_setitem(exists_map, i, 0)
        for i in xrange(num_trees):
            lit = domain["Tree_%d" % i]
            sdd.intArray_setitem(exists_map, lit, 1)
        ret = sdd.sdd_exists_multiple(exists_map, ret, manager)
    '''
    return ret

def sdd_disjunctive_of_terms(manager, variables, positive_variable_tuples):
    result = sdd.sdd_manager_false(manager)
    for positive_tuple in positive_variable_tuples:
        cur_term = sdd_term(manager, variables, positive_tuple)
        result = sdd.sdd_disjoin(result, cur_term, manager)
    return result
