def decompose(num_vertices, edges, htd, node_map=None, minor_graph=None, **kwargs):
    logger.debug(f"Using tree decomposition seed: {kwargs['runid']}")

    # Run htd
    p = subprocess.Popen([htd["path"], "--seed", str(kwargs["runid"]), *htd["parameters"]],
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)

    if "gr_file" in kwargs and kwargs["gr_file"]:
        logger.debug("Writing graph file")
        with FileWriter(kwargs["gr_file"]) as fw:
            fw.write_gr(num_vertices, edges)

    logger.debug("Running htd")
    StreamWriter(p.stdin).write_gr(num_vertices, edges)
    p.stdin.close()
    tdr = TdReader.from_stream(p.stdout)
    p.wait()

    if node_map:
        logger.debug("De-normalizing tree decomposition")
        tdr.bags = {k: [node_map[vv] for vv in v] for k, v in tdr.bags.items()}

    logger.debug("Parsing tree decomposition")
    #td = TreeDecomp(tdr.num_bags, tdr.tree_width, tdr.num_orig_vertices, problem.get_root(tdr.bags, tdr.adjacency_list, tdr.root), tdr.bags, tdr.adjacency_list)
    td = TreeDecomp(tdr.num_bags, tdr.tree_width, tdr.num_orig_vertices, tdr.root, tdr.bags, tdr.adjacency_list, minor_graph)
    logger.info(f"Tree decomposition #bags: {td.num_bags} tree_width: {td.tree_width} #vertices: {td.num_orig_vertices} #leafs: {len(td.leafs)} #edges: {len(td.edges)}")

    if "td_file" in kwargs and kwargs["td_file"]:
        with FileWriter(kwargs["td_file"]) as fw:
            fw.write_td(tdr.num_bags, tdr.tree_width, tdr.num_orig_vertices, tdr.root, tdr.bags, td.edges)

    return td
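
# --- Hedged usage sketch (not part of the original module) ---
# Illustrates how decompose() above might be called. The htd dict layout
# ("path"/"parameters") and the runid/gr_file/td_file kwargs mirror the keys
# accessed in decompose(); the binary path, graph, and seed below are
# illustrative assumptions, not project defaults.
def _example_decompose_usage():
    htd_cfg = {
        "path": "/usr/local/bin/htd_main",  # assumed location of the htd binary
        "parameters": [],                   # extra htd CLI flags, left empty here
    }
    edges = [(1, 2), (2, 3), (3, 4), (4, 1)]  # a small 4-cycle
    td = decompose(4, edges, htd_cfg, runid=42, gr_file=None, td_file=None)
    logger.debug(f"example decomposition: width {td.tree_width}, {td.num_bags} bags")
    return td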

def solve_problem(cfg, cls, file, **kwargs):
    def signal_handler(sig, frame):
        if sig == signal.SIGUSR1:
            logger.warning("Terminating because of error in worker thread")
        else:
            logger.warning("Killing all connections")
        problem.interrupt()
        app_name = None
        if "application_name" in cfg["db"]["dsn"]:
            app_name = cfg["db"]["dsn"]["application_name"]
        admin_db.killall(app_name)
        sys.exit(0)

    admin_db = DBAdmin.from_cfg(cfg["db_admin"])
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGUSR1, signal_handler)

    pool = BlockingThreadedConnectionPool(1, cfg["db"]["max_connections"], **cfg["db"]["dsn"])
    problem = cls(file, pool, **cfg["dpdb"], **kwargs)

    logger.info("Using tree decomposition seed: {}".format(kwargs["runid"]))
    # Run htd
    p = subprocess.Popen([cfg["htd"]["path"], "--seed", str(kwargs["runid"]), *cfg["htd"]["parameters"]],
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)

    logger.info("Parsing input file")
    input = problem.prepare_input(file)
    if "gr_file" in kwargs and kwargs["gr_file"]:
        logger.info("Writing graph file")
        with FileWriter(kwargs["gr_file"]) as fw:
            fw.write_gr(*input)

    logger.info("Running htd")
    StreamWriter(p.stdin).write_gr(*input)
    p.stdin.close()
    tdr = TdReader.from_stream(p.stdout)
    p.wait()

    # solve it
    logger.info("Parsing tree decomposition")
    td = TreeDecomp(tdr.num_bags, tdr.tree_width, tdr.num_orig_vertices, tdr.root, tdr.bags, tdr.adjacency_list)
    logger.info(f"#bags: {td.num_bags} tree_width: {td.tree_width} #vertices: {td.num_orig_vertices} #leafs: {len(td.leafs)} #edges: {len(td.edges)}")

    if "td_file" in kwargs and kwargs["td_file"]:
        with FileWriter(kwargs["td_file"]) as fw:
            fw.write_td(tdr.num_bags, tdr.tree_width, tdr.num_orig_vertices, tdr.root, tdr.bags, td.edges)

    if td.tree_width <= tw_limit:
        problem.set_td(td)
        problem.setup()
        if "faster" not in kwargs or not kwargs["faster"]:
            problem.store_cfg(flatten_cfg(cfg, ("db.dsn", "db_admin", "htd.path")))
        problem.solve()
    else:
        print("Treewidth Limit Reached")
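
# --- Hedged configuration sketch (not part of the original module) ---
# A rough outline of the cfg dict solve_problem() above reads, reconstructed
# only from the keys it accesses (db, db_admin, htd, dpdb). All concrete
# values are placeholders, not the project's shipped defaults; tw_limit is
# likewise assumed to be provided at module scope.
_EXAMPLE_CFG = {
    "db": {
        "max_connections": 16,
        "dsn": {"host": "localhost", "dbname": "dpdb", "user": "dpdb",
                "application_name": "dpdb"},
    },
    "db_admin": {},   # consumed by DBAdmin.from_cfg()
    "htd": {"path": "/usr/local/bin/htd_main", "parameters": []},
    "dpdb": {},       # forwarded as keyword arguments to the problem class
}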

def call_solver(self, type):
    global cfg
    # logger.info(f"Call solver: {type} with #vars {self.formula.num_vars}, #clauses {len(self.formula.clauses)}, #projected {len(self.projected)}")
    cfg_str = f"{type}_solver"
    assert cfg_str in cfg["nesthdb"]
    assert "path" in cfg["nesthdb"][cfg_str]
    local_cfg = cfg["nesthdb"][cfg_str]
    solver = [local_cfg["path"]]
    assert "output_parser" in local_cfg
    if "args" in local_cfg:
        solver.extend(local_cfg["args"].split(' '))
    solver_parser = local_cfg["output_parser"]
    reader_module = importlib.import_module("dpdb.reader")
    solver_parser_cls = getattr(reader_module, solver_parser["class"])

    tmp = tempfile.NamedTemporaryFile().name
    with FileWriter(tmp) as fw:
        fw.write_elp(self.elp)

    if interrupted:
        return -1

    # self.active_process = psolver = subprocess.Popen(solver + [tmp], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
    # output = solver_parser_cls.from_stream(psolver.stdout,**solver_parser["args"])
    # psolver.wait()
    # psolver.stdout.close()
    self.active_process = psolver = subprocess.Popen(solver + [tmp], stdin=subprocess.DEVNULL,
                                                     stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
    try:
        if self.fallback_depth < cfg["nesthdb"]["fallback_recursion_depth"]:
            output, _ = psolver.communicate(timeout=cfg["nesthdb"]["max_solver_time"])
        else:
            output, _ = psolver.communicate()
        psolver.stdout.close()
        output = solver_parser_cls.from_string(output.decode(), **solver_parser["args"])
    except subprocess.TimeoutExpired:
        logger.warning("Solver ran into timeout: fallback")
        psolver.kill()  # the child is not killed automatically when the timeout expires
        self.fallback_depth = self.fallback_depth + 1
        return self.solve(fallback=True)

    self.active_process = None
    if interrupted:
        return -1

    result = getattr(output, solver_parser["result"])
    if self.count or self.qr:
        try:
            result = int(result) if result else 0
        except ValueError:
            result = 1 if result == "SATISFIABLE" else 0
    else:
        result = (result == "SATISFIABLE")
    logger.info(f"Solver {type} result: {result}")
    return result
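
# --- Hedged configuration sketch (not part of the original module) ---
# One possible shape of a cfg["nesthdb"]["<type>_solver"] entry as the
# call_solver() above expects it: a mandatory "path" and "output_parser",
# optional space-separated "args". The key name, binary path, flags, and the
# parser class/field chosen here are illustrative assumptions only.
_EXAMPLE_ELP_SOLVER_CFG = {
    "nesthdb": {
        "elp_solver": {                                   # hypothetical <type> name
            "path": "/usr/local/bin/some_elp_solver",     # hypothetical binary
            "args": "--models 0",                         # split on spaces before the call
            "output_parser": {
                "class": "CnfReader",      # must name a class in dpdb.reader; illustrative
                "args": {"silent": True},  # forwarded to the parser
                "result": "models",        # attribute read from the parsed output
            },
        },
        "fallback_recursion_depth": 2,     # below this depth, communicate() gets a timeout
        "max_solver_time": 300,            # timeout in seconds for the guarded call
    },
}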

def solve_external(self, num_vars, clauses, extra_clauses, proj_vars=None):
    logger.debug("Calling external solver for {} with {} clauses, {} vars, and proj {}".format(
        extra_clauses, len(clauses), num_vars, proj_vars))
    maybe_sat = True
    tmp = tempfile.NamedTemporaryFile().name
    normalize_cnf = True

    if self.preprocessor:
        logger.debug("Preprocessing")
        ppmc = subprocess.Popen(self.preprocessor, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
        self.sub_procs.add(ppmc)
        StreamWriter(ppmc.stdin).write_cnf(num_vars, clauses, normalize=True)
        normalize_cnf = False
        ppmc.stdin.close()
        input = CnfReader.from_stream(ppmc.stdout, silent=True)
        ppmc.wait()
        ppmc.stdout.close()
        self.sub_procs.remove(ppmc)
        maybe_sat = input.maybe_sat
        num_vars = input.num_vars
        clauses = input.clauses

    if maybe_sat and not self.interrupted:
        with FileWriter(tmp) as fw:
            fw.write_cnf(num_vars, clauses, normalize=normalize_cnf, proj_vars=proj_vars)
        result = None  # stays None if we get interrupted before the first solver call
        for i in range(128):
            if self.interrupted:
                break
            #if len(self.sat_solver) == 3: #seed given
            #    self.sat_solver[2] = str(random.randrange(13423423471))
            added = []
            if len(self.sat_solver) > 1 and self.sat_solver[1] == "dpdb.py":
                added = ["sharpsat"]
            psat = subprocess.Popen(self.sat_solver + [tmp] + added,
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            self.sub_procs.add(psat)
            output = self.sat_solver_parser_cls.from_stream(psat.stdout, **self.sat_solver_parser["args"])
            psat.wait()
            psat.stdout.close()
            self.sub_procs.remove(psat)
            result = getattr(output, self.sat_solver_parser["result"])
            if psat.returncode == 245 or psat.returncode == 250:
                logger.debug("Retrying call to external solver, returncode {}, index {}".format(psat.returncode, i))
            else:
                logger.debug("No Retry, returncode {}, result {}, index {}".format(psat.returncode, result, i))
                break
    else:
        result = 0

    if result is None:
        logger.warning("Result is None!")
    return result
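
# --- Hedged usage sketch (not part of the original module) ---
# A minimal illustration of calling solve_external() above on a toy formula.
# The problem instance is assumed to be fully configured (preprocessor,
# sat_solver, parser); clause set and projection are made up for the example.
def _example_solve_external(problem):
    clauses = [[1, 2], [-1, 3], [2, -3]]  # CNF over variables 1..3
    # extra_clauses only appears in the debug log; proj_vars restricts counting
    return problem.solve_external(3, clauses, extra_clauses=[], proj_vars=[1, 2])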

def call_solver(self, type):
    global cfg
    logger.info(f"Call solver: {type} with #vars {self.formula.num_vars}, #clauses {len(self.formula.clauses)}, #projected {len(self.projected)}")
    cfg_str = f"{type}_solver"
    assert cfg_str in cfg["nesthdb"]
    assert "path" in cfg["nesthdb"][cfg_str]
    local_cfg = cfg["nesthdb"][cfg_str]
    solver = [local_cfg["path"]]
    if "seed_arg" in local_cfg:
        solver.append(local_cfg["seed_arg"])
        solver.append(str(self.kwargs["runid"]))
    if "args" in local_cfg:
        solver.extend(local_cfg["args"].split(' '))
    if "output_parser" in local_cfg:
        solver_parser = local_cfg["output_parser"]
        reader_module = importlib.import_module("dpdb.reader")
        solver_parser_cls = getattr(reader_module, solver_parser["class"])
    else:
        solver_parser = {"class": "CnfReader", "args": {"silent": True}, "result": "models"}
        solver_parser_cls = CnfReader

    tmp = tempfile.NamedTemporaryFile().name
    with FileWriter(tmp) as fw:
        fw.write_cnf(self.formula.num_vars, self.formula.clauses, normalize=True, proj_vars=self.projected)

    for i in range(128):
        if interrupted:
            return -1
        self.active_process = psat = subprocess.Popen(solver + [tmp], stdout=subprocess.PIPE)
        output = solver_parser_cls.from_stream(psat.stdout, **solver_parser["args"])
        psat.wait()
        psat.stdout.close()
        self.active_process = None
        if interrupted:
            return -1
        result = int(getattr(output, solver_parser["result"]))
        if psat.returncode == 245 or psat.returncode == 250:
            logger.debug("Retrying call to external solver, returncode {}, index {}".format(psat.returncode, i))
        else:
            logger.debug("No Retry, returncode {}, result {}, index {}".format(psat.returncode, result, i))
            break

    logger.info(f"Solver {type} result: {result}")
    return result
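
# --- Hedged configuration sketch (not part of the original module) ---
# A possible cfg["nesthdb"]["<type>_solver"] entry for this call_solver()
# variant: "seed_arg" is emitted together with str(runid), and when no
# "output_parser" is given the code falls back to CnfReader with its "models"
# attribute. The key name, binary path, and flags are assumptions only.
_EXAMPLE_COUNT_SOLVER_CFG = {
    "sharpsat_solver": {                       # hypothetical <type> name
        "path": "/usr/local/bin/sharpsat",     # hypothetical model-counter binary
        "seed_arg": "-s",                      # hypothetical seed flag, followed by str(runid)
        "args": "-q",                          # optional, split on spaces
        # no "output_parser": defaults to CnfReader(silent=True) / "models"
    },
}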