def parse_bdd(filename): var_count,node_count = pre_parse_bdd(filename) print " zdd var count:", var_count print " zdd node count:", node_count manager = start_manager(var_count,range(1,var_count+1)) root = sdd.sdd_manager_vtree(manager) nodes = [None] * (node_count+1) index,id2index = 1,{} f = open(filename) for line in f.readlines(): if line.startswith("."): break line = line.strip().split() nid = int(line[0]) dvar = int(line[1]) lo,hi = line[2],line[3] hi_lit = sdd.sdd_manager_literal( dvar,manager) lo_lit = sdd.sdd_manager_literal(-dvar,manager) if lo == 'T': lo_sdd,lo_vtree = sdd.sdd_manager_true(manager),None elif lo == 'B': lo_sdd,lo_vtree = sdd.sdd_manager_false(manager),None else: lo_id = int(lo) lo_sdd,lo_vtree = nodes[id2index[lo_id]] if hi == 'T': hi_sdd,hi_vtree = sdd.sdd_manager_true(manager),None elif hi == 'B': hi_sdd,hi_vtree = sdd.sdd_manager_false(manager),None else: hi_id = int(hi) hi_sdd,hi_vtree = nodes[id2index[hi_id]] #v1,v2 = sdd.sdd_vtree_of(hi_lit),sdd.sdd_vtree_of(hi_sdd) #vt = sdd.sdd_vtree_lca(v1,v2,root) vt = sdd.sdd_manager_vtree_of_var(dvar,manager) vt = sdd.sdd_vtree_parent(vt) vt = sdd.sdd_vtree_right(vt) if dvar < var_count: hi_sdd = zero_normalize_sdd(hi_sdd,hi_vtree,vt,manager) lo_sdd = zero_normalize_sdd(lo_sdd,lo_vtree,vt,manager) vt = sdd.sdd_vtree_parent(vt) hi_sdd = sdd.sdd_conjoin(hi_lit,hi_sdd,manager) lo_sdd = sdd.sdd_conjoin(lo_lit,lo_sdd,manager) alpha = sdd.sdd_disjoin(hi_sdd,lo_sdd,manager) nodes[index] = (alpha,vt) id2index[nid] = index index += 1 f.close() return manager,nodes[-1][0]
def _encode_graph(g,manager):
    """Encode graph g's source-to-sink paths as an SDD via _encode_grid_aux."""
    # scratch structures handed to the recursive encoder
    memo = defaultdict(dict)
    unit = sdd.sdd_manager_true(manager)
    target = g.nodes[-1]
    alpha = _encode_grid_aux(g.source,target,g.nodes,g,manager,
                             base=unit,cache=memo,verbose=False)
    # release every memoized intermediate result
    for cached in memo.values():
        sdd.sdd_deref(cached,manager)
    #sdd.sdd_ref(alpha,manager)
    return alpha
def true(self):
    """Return this manager's SDD node for the constant true."""
    mgr = self.__manager
    return sdd.sdd_manager_true(mgr)
def generate_sdd_from_graphset(paths, sdd_manager, zdd_edge_to_sdd_edges):
    """Convert a graphset (a ZDD over edge variables) into an SDD.

    paths                 -- graphset object supporting .dump(file)
    sdd_manager           -- target SDD manager
    zdd_edge_to_sdd_edges -- list mapping ZDD variable index -> list of
                             SDD edge variables (index 0 unused)

    A ZDD variable being true is encoded as "exactly one" of its SDD
    edges; ZDD variables skipped along a branch (ZDD suppression
    semantics: suppressed = false) become negative terms over their
    SDD edges.  Returns the resulting SDD node.
    """
    # fix: create the temp file *before* the try block -- in the
    # original, TemporaryFile() raising would make the finally clause
    # fail with a NameError on zdd_file
    zdd_file = tempfile.TemporaryFile()
    try:
        paths.dump(zdd_file)
        zdd_file.seek(0)
        zdd_content = zdd_file.readlines()
    finally:
        zdd_file.close()
    # handle the trivial logic
    if zdd_content[0].strip() == "T":
        # true terminal: conjoin the negative term of every edge group
        result_sdd = sdd.sdd_manager_true(sdd_manager)
        for sdd_edges in zdd_edge_to_sdd_edges:
            cur_neg_term = sdd.util.sdd_negative_term(sdd_manager, sdd_edges)
            result_sdd = sdd.sdd_conjoin(result_sdd, cur_neg_term,
                                         sdd_manager)
        return result_sdd
    if zdd_content[0].strip() == "B":
        # bottom terminal: unsatisfiable
        result_sdd = sdd.sdd_manager_false(sdd_manager)
        return result_sdd
    # per-ZDD-variable indicator SDDs (index 0 is a placeholder so the
    # lists line up with 1-based ZDD variable indices)
    pos_zdd_indicator_to_sdd = [None]
    neg_zdd_indicator_to_sdd = [None]
    for sdd_edges in zdd_edge_to_sdd_edges:
        if sdd_edges:
            # ZDD var true  -> exactly one of its SDD edges is used
            pos_zdd_indicator_to_sdd.append(
                sdd.util.sdd_exactly_one(sdd_manager, sdd_edges))
            # ZDD var false -> none of its SDD edges is used
            neg_zdd_indicator_to_sdd.append(
                sdd.util.sdd_negative_term(sdd_manager, sdd_edges))
    conversion_map = {}  # key is the node index, value is an sdd node
    decision_variable_map = {}  # key is the node index, value is the variable index
    last_node_index = None  # nodes are listed children-first; last one is the root
    zdd_variable_size = len(zdd_edge_to_sdd_edges) - 1

    def complete_zdd_child(variable_index, child, conversion_map,
                           decision_variable_map, zdd_variable_size,
                           sdd_manager):
        # Materialize a child reference ('T', 'B', or a node id) as an
        # SDD, inserting negative terms for the ZDD variables skipped
        # between this node and the child.
        if child == "T":
            if variable_index != zdd_variable_size:
                skipped_variables = range(variable_index + 1,
                                          zdd_variable_size + 1)
                neg_terms = sdd.util.sdd_negative_term(
                    sdd_manager,
                    sum([zdd_edge_to_sdd_edges[x]
                         for x in skipped_variables], []))
                return neg_terms
            else:
                return sdd.sdd_manager_true(sdd_manager)
        elif child == "B":
            return sdd.sdd_manager_false(sdd_manager)
        else:
            child = int(child)
            child_variable = decision_variable_map[child]
            if child_variable == variable_index + 1:
                return conversion_map[child]
            else:
                skipped_variables = range(variable_index + 1, child_variable)
                neg_terms = sdd.util.sdd_negative_term(
                    sdd_manager,
                    sum([zdd_edge_to_sdd_edges[x]
                         for x in skipped_variables], []))
                return sdd.sdd_conjoin(neg_terms, conversion_map[child],
                                       sdd_manager)

    for line in zdd_content:
        line = line.strip()
        if line == ".":
            break  # end-of-nodes marker
        line_toks = line.split(" ")
        node_index = int(line_toks[0])
        variable_index = int(line_toks[1])
        low_child = line_toks[2]
        high_child = line_toks[3]
        sdd_low_child = complete_zdd_child(variable_index, low_child,
                                           conversion_map,
                                           decision_variable_map,
                                           zdd_variable_size, sdd_manager)
        sdd_high_child = complete_zdd_child(variable_index, high_child,
                                            conversion_map,
                                            decision_variable_map,
                                            zdd_variable_size, sdd_manager)
        # decision node: (var-true & high) | (var-false & low)
        cur_node_positive_element = sdd.sdd_conjoin(
            pos_zdd_indicator_to_sdd[variable_index], sdd_high_child,
            sdd_manager)
        cur_node_negative_element = sdd.sdd_conjoin(
            neg_zdd_indicator_to_sdd[variable_index], sdd_low_child,
            sdd_manager)
        conversion_map[node_index] = sdd.sdd_disjoin(
            cur_node_negative_element, cur_node_positive_element, sdd_manager)
        decision_variable_map[node_index] = variable_index
        last_node_index = node_index
    result = conversion_map[last_node_index]
    # account for ZDD variables skipped above the root
    if decision_variable_map[last_node_index] != 1:
        skipped_variables = range(1, decision_variable_map[last_node_index])
        neg_terms = sdd.util.sdd_negative_term(
            sdd_manager,
            sum([zdd_edge_to_sdd_edges[x] for x in skipped_variables], []))
        result = sdd.sdd_conjoin(neg_terms, conversion_map[last_node_index],
                                 sdd_manager)
    return result
def compile_all(forest_sdds, used_vars_list, num_trees, domain, manager,
                constraint_sdd=None):
    """Combine the per-tree SDDs in forest_sdds by dynamic programming.

    NOTE(review): cur[i] appears to hold the SDD for "at least i of the
    trees processed so far are satisfied", capped at
    half = ceil(num_trees/2); the returned cur[-2] would then encode
    "at least half of the trees are satisfied" (a majority vote).
    Confirm against callers before relying on this reading.

    forest_sdds    -- one compiled SDD per tree; each is dereferenced
                      here after being folded in
    used_vars_list -- per-tree variable sets, consulted by
                      pick_next_tree to choose the compilation order
    constraint_sdd -- optional SDD used as the starting "true" entry of
                      the DP row (referenced here)
    """
    half = int(math.ceil(num_trees / 2.0))
    true_sdd = sdd.sdd_manager_true(manager)
    false_sdd = sdd.sdd_manager_false(manager)
    last_size = 2**16  # size threshold that triggers GC / minimization
    if not constraint_sdd:
        constraint_sdd = sdd.sdd_manager_true(manager)
    true_sdd = constraint_sdd
    sdd.sdd_ref(true_sdd, manager)
    # work on copies so the caller's lists are not mutated below
    to_compile_sdds = [tree_sdd for tree_sdd in forest_sdds]
    used_vars_list = [used_vars for used_vars in used_vars_list]
    '''
    if OPTIONS.majority_circuit_opt:
        majority_sdds = [sdd.sdd_manager_literal(domain["Tree_%d" % i], manager)
                         for i in xrange(num_trees)]
        for single_sdd in majority_sdds:
            sdd.sdd_ref(single_sdd, manager)
        to_compile_sdds = majority_sdds
        used_vars_list = [set() for _ in forest_sdds]
    '''
    cur = [true_sdd, false_sdd]
    used_vars = set()
    for k in xrange(num_trees):
        last, cur = cur, []
        # pick the next tree to fold in, then remove it from the queues
        tree_index = pick_next_tree(used_vars_list, used_vars)
        tree_sdd = to_compile_sdds[tree_index]
        used_vars |= used_vars_list[tree_index]
        to_compile_sdds = to_compile_sdds[:tree_index] + to_compile_sdds[
            tree_index + 1:]
        used_vars_list = used_vars_list[:tree_index] + used_vars_list[
            tree_index + 1:]
        # DP row update: cur[i] = last[i] | (tree_sdd & last[i-1])
        for i in xrange(min(half, k + 1) + 1):
            cur_sdd = last[i]
            #cur_sdd = sdd.sdd_conjoin(sdd.sdd_negate(tree_sdd,manager),cur_sdd,manager)
            """
            elif i+(num_trees-k) < half: # don't bother
                cur_sdd = sdd.sdd_manager_false(manager)
            """
            if i == 0:
                pass
            elif i > 0:
                alpha = sdd.sdd_conjoin(tree_sdd, last[i - 1], manager)
                # last[i-1] has now been consumed by both cur[i-1] and alpha
                sdd.sdd_deref(last[i - 1], manager)
                cur_sdd = sdd.sdd_disjoin(cur_sdd, alpha, manager)
            sdd.sdd_ref(cur_sdd, manager)
            cur.append(cur_sdd)
            # periodic cleanup: collect dead nodes, then try a limited
            # vtree minimization once the live count doubles
            if sdd.sdd_manager_dead_count(manager) >= 2 * last_size:
                sdd.sdd_manager_garbage_collect(manager)
            if sdd.sdd_manager_live_count(manager) >= 2 * last_size:
                print "*",
                sdd.sdd_manager_minimize_limited(manager)
                last_size = 2 * last_size
        if k >= half:
            # the top row entry was not carried over into cur; release it
            sdd.sdd_deref(last[-2], manager)
        sdd.sdd_deref(tree_sdd, manager)
        cur.append(false_sdd)
        print "%d" % (num_trees - k),
        sys.stdout.flush()
        #print "%d/%d" % (k,num_trees)
    print "live size:", sdd.sdd_manager_live_count(manager)
    #print "dead size:", sdd.sdd_manager_dead_count(manager)
    sdd.sdd_manager_garbage_collect(manager)
    #sdd.sdd_manager_minimize_limited(manager)
    #for alpha in cur: sdd.sdd_deref(alpha,manager)
    ret = cur[-2]
    '''
    if OPTIONS.majority_circuit_opt:
        # save ret (the majority circuit)
        # save each individual tree_sdd
        vtree = sdd.sdd_manager_vtree(manager)
        majority_sdd_filename = "%s_majority.sdd" % sdd_basename
        majority_vtree_filename = "%s_majority.vtree" % sdd_basename
        print "Writing majority sdd file %s and majority vtree file %s" % (majority_sdd_filename, majority_vtree_filename)
        sdd.sdd_save(majority_sdd_filename,ret)
        sdd.sdd_vtree_save(majority_vtree_filename,vtree)
        print "Writing individual tree sdds..."
        for k,tree_sdd in enumerate(forest_sdds):
            tree_name = "tree_%d" % k
            tree_sdd_filename = "%s_majority_%s.sdd" % (sdd_basename, tree_name)
            sdd.sdd_save(tree_sdd_filename, tree_sdd)
        gamma = sdd.sdd_manager_true(manager)
        for k,tree_sdd in enumerate(forest_sdds):
            new_gamma = sdd.sdd_conjoin(gamma, tree_sdd, manager)
            sdd.sdd_ref(new_gamma, manager)
            sdd.sdd_deref(gamma, manager)
            gamma = new_gamma
            if sdd.sdd_manager_dead_count(manager) >= 2*last_size:
                sdd.sdd_manager_garbage_collect(manager)
            if sdd.sdd_manager_live_count(manager) >= 2*last_size:
                print "*",
                sdd.sdd_manager_minimize_limited(manager)
                last_size = 2*last_size
            print "%d" % k,
            sys.stdout.flush()
        print "live size:", sdd.sdd_manager_live_count(manager)
        ret = sdd.sdd_conjoin(ret, gamma, manager)
        #existential quantification
        print "Existential quantification..."
        exists_map = sdd.new_intArray(len(domain))
        for i in xrange(len(domain)):
            sdd.intArray_setitem(exists_map,i,0)
        for i in xrange(num_trees):
            lit = domain["Tree_%d" % i]
            sdd.intArray_setitem(exists_map,lit,1)
        ret = sdd.sdd_exists_multiple(exists_map, ret, manager)
    '''
    return ret
def parse_bdd(filename,var_count=None):
    """Convert the BDD/ZDD stored in filename into an SDD.

    filename  -- node-list file: lines "id dvar lo hi" (child 'T' = true
                 terminal, 'B' = false terminal, otherwise a node id
                 defined on an earlier line), terminated by ".".
    var_count -- total variable count for the manager; when None it is
                 taken from the file via pre_parse_bdd.

    Returns (manager, root_sdd), with the root zero-normalized up to the
    manager's global vtree if necessary.
    """
    if var_count is None:
        var_count,node_count = pre_parse_bdd(filename)
    else:
        # caller supplied var_count; we only need the node count here
        _,node_count = pre_parse_bdd(filename)
    #print " zdd var count:", var_count
    #print " zdd node count:", node_count
    manager = start_manager(var_count,range(1,var_count+1))
    root = sdd.sdd_manager_vtree(manager)
    nodes = [None] * (node_count+1)  # nodes[i] = (sdd,vtree), filled bottom-up
    index,id2index = 1,{}
    # fix: use a context manager so the file is closed even if parsing
    # raises (the original opened and closed the handle manually)
    with open(filename) as f:
        for line in f:
            if line.startswith("."): break  # end-of-nodes marker
            line = line.strip().split()
            nid = int(line[0])
            dvar = int(line[1])
            lo,hi = line[2],line[3]
            hi_lit = sdd.sdd_manager_literal( dvar,manager)
            lo_lit = sdd.sdd_manager_literal(-dvar,manager)
            # resolve the low child: terminal or previously-built node
            if lo == 'T':
                lo_sdd,lo_vtree = sdd.sdd_manager_true(manager),None
            elif lo == 'B':
                lo_sdd,lo_vtree = sdd.sdd_manager_false(manager),None
            else:
                lo_id = int(lo)
                lo_sdd,lo_vtree = nodes[id2index[lo_id]]
            # resolve the high child
            if hi == 'T':
                hi_sdd,hi_vtree = sdd.sdd_manager_true(manager),None
            elif hi == 'B':
                hi_sdd,hi_vtree = sdd.sdd_manager_false(manager),None
            else:
                hi_id = int(hi)
                hi_sdd,hi_vtree = nodes[id2index[hi_id]]
            #v1,v2 = sdd.sdd_vtree_of(hi_lit),sdd.sdd_vtree_of(hi_sdd)
            #vt = sdd.sdd_vtree_lca(v1,v2,root)
            vt = sdd.sdd_manager_vtree_of_var(dvar,manager)
            vt = sdd.sdd_vtree_parent(vt)
            vt = sdd.sdd_vtree_right(vt)
            if dvar < var_count:
                # normalize both children up to the vtree node right of
                # this decision variable before conjoining
                hi_sdd = zero_normalize_sdd(hi_sdd,hi_vtree,vt,manager)
                lo_sdd = zero_normalize_sdd(lo_sdd,lo_vtree,vt,manager)
                vt = sdd.sdd_vtree_parent(vt)
            # Shannon expansion: (dvar & hi) | (~dvar & lo)
            hi_sdd = sdd.sdd_conjoin(hi_lit,hi_sdd,manager)
            lo_sdd = sdd.sdd_conjoin(lo_lit,lo_sdd,manager)
            alpha = sdd.sdd_disjoin(hi_sdd,lo_sdd,manager)
            nodes[index] = (alpha,vt)
            id2index[nid] = index
            index += 1
    # the last node parsed is the root; normalize it up to the
    # manager's global vtree if it is not already there
    last_sdd,last_vtree = nodes[-1]
    vt = sdd.sdd_manager_vtree(manager)
    if vt != last_vtree:
        last_sdd = zero_normalize_sdd(last_sdd,last_vtree,vt,manager)
    return manager,last_sdd
def all_false_term(var_list,manager):
    """Return the SDD term asserting that every variable in var_list is false."""
    term = sdd.sdd_manager_true(manager)
    for v in var_list:
        neg_lit = sdd.sdd_manager_literal(-v,manager)
        term = sdd.sdd_conjoin(term,neg_lit,manager)
    return term
def _encode_grid_aux(source,sink,nodes,graph,manager,
                     base=None,cache=None,verbose=False):
    """Recursively build the SDD of source->sink paths of graph
    restricted to the given node set.

    SDD variable (graph.edge_to_index[edge] + 1) represents edge being
    on the path.  Results are memoized in cache, keyed by
    (source, sorted node tuple); cached entries are ref'd here and
    deref'd by the caller (see _encode_graph).

    NOTE(review): the `if True:` switch selects (s,t)-path semantics;
    the dead `else` branch encodes "all paths starting from s" and is
    kept for reference.
    """
    nodes = sorted(nodes)
    key = (source,tuple(nodes))
    if cache and key in cache:
        return cache[key]
    if True: # INITIALIZATION FOR (S,T) PATHS
        if sink not in nodes: # unreachable
            return sdd.sdd_manager_false(manager)
        if len(nodes) == 1: # must be sink
            return sdd.sdd_manager_true(manager)
        # fix: the original referenced an undefined name `g` here; the
        # parameter is `graph`
        if not graph.reachable(source,sink,nodes):
            alpha = sdd.sdd_manager_false(manager)
            cache[key] = alpha
            return alpha
        if source == sink:
            # path complete: turn off all edges incident to unused nodes
            alpha = sdd.sdd_manager_true(manager)
            sdd.sdd_ref(alpha,manager)
            my_nodes = list(nodes)
            my_nodes.remove(source)
            for node in my_nodes: # for all unused nodes
                edges = graph.incident_edges(node,nodes=nodes)
                sdd_vars = [ graph.edge_to_index[edge] + 1 for edge in edges ]
                all_false = all_false_term(sdd_vars,manager)
                alpha,tmp = sdd.sdd_conjoin(alpha,all_false,manager),alpha
                sdd.sdd_ref(alpha,manager); sdd.sdd_deref(tmp,manager)
            cache[key] = alpha
            return alpha
        alpha = sdd.sdd_manager_false(manager)
        sdd.sdd_ref(alpha,manager)
    else: # INITIALIZATION FOR ALL PATHS STARTING FROM S
        # empty graph, source should equal sink
        if len(nodes) == 1:
            return sdd.sdd_manager_true(manager)
        # initial case: no more paths
        alpha = sdd.sdd_manager_true(manager)
        sdd.sdd_ref(alpha,manager)
        my_nodes = list(nodes)
        my_nodes.remove(source)
        for node in my_nodes: # for all unused nodes
            edges = graph.incident_edges(node,nodes=nodes)
            sdd_vars = [ graph.edge_to_index[edge] + 1 for edge in edges ]
            all_false = all_false_term(sdd_vars,manager)
            alpha,tmp = sdd.sdd_conjoin(alpha,all_false,manager),alpha
            sdd.sdd_ref(alpha,manager); sdd.sdd_deref(tmp,manager)
    # after this, try to extend the paths
    # first, find incident edges
    edges = graph.incident_edges(source,nodes=nodes)
    sdd_vars = [ graph.edge_to_index[edge] + 1 for edge in edges ]
    all_false = all_false_term(sdd_vars,manager)
    sdd.sdd_ref(all_false,manager)
    # for each incident edge
    my_nodes = list(nodes)
    my_nodes.remove(source)
    for edge,sdd_var in zip(edges,sdd_vars):
        # recurse on the neighbor with source removed from the node set
        neighbor = Graph.neighbor(source,edge)
        gamma = _encode_grid_aux(neighbor,sink,my_nodes,graph,manager,
                                 base=base,cache=cache,verbose=verbose)
        if sdd.sdd_node_is_false(gamma): continue
        # exactly one incident edge on: this edge true, the rest false
        sdd_lit = sdd.sdd_manager_literal(sdd_var,manager)
        beta = sdd.sdd_exists(sdd_var,all_false,manager)
        beta = sdd.sdd_conjoin(beta,sdd_lit,manager)
        beta = sdd.sdd_conjoin(beta,gamma,manager)
        # accumulate
        alpha,tmp = sdd.sdd_disjoin(alpha,beta,manager),alpha
        sdd.sdd_ref(alpha,manager); sdd.sdd_deref(tmp,manager)
    sdd.sdd_deref(all_false,manager)
    cache[key] = alpha
    return alpha