def do_search(problem, configname, timeout, memory, debug=False):
    """Run the planner on *problem* using configuration *configname*.

    Returns an error-message string when the run is skipped or fails a
    precondition, and None otherwise (including when the planner itself
    fails; only a log file records that).  Resource limits *timeout*
    (seconds, presumably) and *memory* are passed straight through to
    benchmark.run.
    """
    # TODO: Currently, do_search returns an error msg on error and
    # None on no error, while do_translate/do_preprocess return
    # True/False for success/no success. This should be unified.
    # Maybe throw exceptions if something goes wrong? Also,
    # maybe we should allow for warnings in addition to errors.
    # The "skipped -- dir exists" stuff should maybe just be a
    # warning.
    outdir = search_dir(problem, configname)
    if not debug:
        # Normal runs never clobber existing results and require that
        # the translate and preprocess stages already ran.
        if os.path.exists(outdir):
            return "skipped [%s] %s -- dir exists" % (configname, problem)
        elif not os.path.exists(translate_dir(problem)):
            return "Translated files for %s not available." % problem
        elif not os.path.exists(preprocess_dir(problem)):
            return "Preprocessed files for %s not available." % problem
    if debug and not os.path.exists(translate_dir(problem)):
        # Do not abort if translation does not exist. Don't store search output.
        # (Instead, translate if necessary and always search.)
        # NOTE(review): only the translate dir is checked here; if the
        # preprocess dir were missing while the translate dir exists, the
        # copy below would presumably fail -- confirm this cannot happen.
        print "Translating and Preprocessing..."
        success = do_translate(problem)
        if not success:
            return "Translating %s failed." % problem
        success = do_preprocess(problem)
        if not success:
            return "Preprocessing %s failed." % problem
    # Stage the earlier stages' output files into the working directory;
    # the planner reads them via stdin="output" below.
    copy_files(TRANSLATE_OUTPUTS, ".", src_dir=translate_dir(problem))
    copy_files(PREPROCESS_OUTPUTS, ".", src_dir=preprocess_dir(problem))
    if debug:
        # Write planner output to screen instead of file.
        planner = planner_debug_executable()
        success = benchmark.run(
            cmd=[planner] + planner_configurations.get_config(configname),
            timeout=timeout,
            memory=memory,
            status="status.log",
            stdin="output",
        )
        if success:
            # Debug runs keep nothing around.
            delete_files(["sas_plan"])
            delete_files(["status.log"])
    else:
        planner = planner_executable()
        success = benchmark.run(
            cmd=[planner] + planner_configurations.get_config(configname),
            timeout=timeout,
            memory=memory,
            status="status.log",
            stdin="output",
            stdout="search.log",
            stderr="search.err",
        )
        if success:
            # A plan file only exists on success; logs are archived
            # either way, and search.err only if the planner wrote one.
            move_files(["sas_plan"], outdir)
        move_files(["search.log", "status.log"], outdir)
        move_optional_files(["search.err"], outdir)
    # Clean the staged input copies out of the working directory.
    delete_files(PREPROCESS_OUTPUTS)
    delete_files(TRANSLATE_OUTPUTS)
    return None
def do_search(problem, configname, timeout, memory, debug=False):
    """Run the planner on *problem* with configuration *configname*.

    Returns an error-message string when the run is skipped or a
    precondition fails, None otherwise.  *timeout* and *memory* are
    forwarded to benchmark.run as resource limits.

    TODO (kept from original): this returns an error msg / None while
    do_translate/do_preprocess return True/False -- unify, perhaps via
    exceptions, and maybe demote "skipped -- dir exists" to a warning.
    """
    outdir = search_dir(problem, configname)

    if not debug:
        # Normal runs refuse to overwrite results and require that the
        # translate and preprocess stages already produced their output.
        if os.path.exists(outdir):
            return "skipped [%s] %s -- dir exists" % (configname, problem)
        elif not os.path.exists(translate_dir(problem)):
            return "Translated files for %s not available." % problem
        elif not os.path.exists(preprocess_dir(problem)):
            return "Preprocessed files for %s not available." % problem

    if debug and not os.path.exists(translate_dir(problem)):
        # Debug runs translate/preprocess on demand instead of aborting,
        # and never store the search output.
        print("Translating and Preprocessing...")
        if not do_translate(problem):
            return "Translating %s failed." % problem
        if not do_preprocess(problem):
            return "Preprocessing %s failed." % problem

    # Stage earlier-stage outputs into the working directory; the planner
    # consumes them through stdin="output".
    copy_files(TRANSLATE_OUTPUTS, ".", src_dir=translate_dir(problem))
    copy_files(PREPROCESS_OUTPUTS, ".", src_dir=preprocess_dir(problem))

    config = planner_configurations.get_config(configname)
    if debug:
        # Planner output goes to the screen; nothing is archived.
        succeeded = benchmark.run(
            cmd=[planner_debug_executable()] + config,
            timeout=timeout,
            memory=memory,
            status="status.log",
            stdin="output",
        )
        if succeeded:
            delete_files(["sas_plan"])
            delete_files(["status.log"])
    else:
        succeeded = benchmark.run(
            cmd=[planner_executable()] + config,
            timeout=timeout,
            memory=memory,
            status="status.log",
            stdin="output",
            stdout="search.log",
            stderr="search.err",
        )
        if succeeded:
            # The plan file only exists when the planner succeeded.
            move_files(["sas_plan"], outdir)
        move_files(["search.log", "status.log"], outdir)
        move_optional_files(["search.err"], outdir)

    # Remove the staged input copies from the working directory.
    delete_files(PREPROCESS_OUTPUTS)
    delete_files(TRANSLATE_OUTPUTS)
    return None
def do_preprocess(problem):
    """Run the preprocessor on *problem*'s translated output.

    Returns True on success, False when the preprocessor produced a
    (non-optional) stderr capture, which is treated as failure.
    """
    # Stage the translator's output into the working directory; the
    # preprocessor reads it via stdin="output.sas".
    copy_files(TRANSLATE_OUTPUTS, ".", src_dir=translate_dir(problem))
    benchmark.run(
        cmd=[preprocessor_executable()],
        status="status.log",
        stdin="output.sas",
        stdout="preprocess.log",
        stderr="preprocess.err",
    )
    dest = preprocess_dir(problem)
    move_files(["preprocess.log", "status.log"], dest)
    delete_files(TRANSLATE_OUTPUTS)
    had_stderr_output = move_optional_files(["preprocess.err"], dest)
    if had_stderr_output:
        # Anything on stderr counts as a failed preprocess run.
        return False
    move_files(PREPROCESS_OUTPUTS, dest)
    return True
def run(self):
    """Worker-thread main loop: pull jobs off the shared queue and run
    each one in a forked child process inside a per-thread temp dir.

    Forking is needed because all threads share one working directory
    (see inline comment).  Error text written by the child to an
    "errmsg" file is collected into the shared `errors` list; any other
    leftover file in the temp dir is treated as a critical error that
    stops all workers (they check `critical_errors` before forking).
    """
    thread_had_critical_error = False
    try:
        tmpdir = get_tempdir_name(self.thread_id)
        try:
            os.mkdir(tmpdir)
        except OSError:
            # Directory may already exist from an earlier run; reuse it.
            pass
        while True:
            try:
                job = queue.get_nowait()
            except Queue.Empty:
                # No work left; fall through to cleanup.
                break
            if not critical_errors:
                pid = os.fork()
                if not pid:
                    # We need to fork because all threads share the same
                    # working directory.
                    os.chdir(tmpdir)
                    error = run_job_func(job, self.log)
                    if error:
                        # Hand the error text back to the parent via a file.
                        open("errmsg", "w").write(error)
                    # Terminate the child process.
                    # NOTE(review): SystemExit (rather than os._exit) runs
                    # normal interpreter shutdown in the forked child --
                    # confirm that is intended.
                    raise SystemExit
                # Parent: wait for the child to finish before inspecting
                # what it left behind in the temp dir.
                os.waitpid(pid, 0)
                if os.listdir(tmpdir) == ["errmsg"]:
                    # Child reported a per-job (non-critical) error.
                    error = open(joinpath(tmpdir, "errmsg")).read()
                    self.log(error)
                    errors.append(error)
                    tools.delete_files(["errmsg"], tmpdir)
                if os.listdir(tmpdir):
                    # Any other leftover file is unexpected; flag it as
                    # critical so all workers stop picking up new jobs.
                    msg = "temp dir %s not empty" % tmpdir
                    critical_errors.append(msg)
                    thread_had_critical_error = True
            # Jobs skipped because of critical errors are still marked
            # done so queue.join() in the coordinator can return.
            queue.task_done()
        if not thread_had_critical_error:
            # Keep the temp dir around for inspection on critical errors;
            # otherwise remove it (ignore_errors=True).
            shutil.rmtree(tmpdir, True)
        self.log("ran out of work")
    except KeyboardInterrupt:
        # Swallow Ctrl-C in workers; presumably the main thread handles
        # shutdown -- confirm against the coordinator code.
        pass