def real_run(args, opt_file_lookup, run_uuid, timeout=None):  # pragma: io
    """Run a sequence of independent experiments to fully run the benchmark.

    This uses `subprocess` to launch a separate process (in serial) for each experiment.

    Parameters
    ----------
    args : dict(CmdArgs, [int, str])
        Arguments of options to pass to the experiments being launched. The keys correspond to the
        same arguments passed to this program.
    opt_file_lookup : dict(str, str)
        Mapping from method name to filename containing the wrapper class for the method.
    run_uuid : uuid.UUID
        UUID for this launcher run. Needed to generate different experiment UUIDs on each call.
        This function is deterministic provided the same `run_uuid`.
    timeout : int
        Max seconds allowed per experiment.
    """
    args[CmdArgs.db] = XRSerializer.init_db(args[CmdArgs.db_root], db=args[CmdArgs.db], keys=EXP_VARS, exist_ok=True)
    logger.info("Supply --db %s to append to this experiment or reproduce jobs file." % args[CmdArgs.db])

    # Get and run the commands in a sub-process
    counter = 0
    G = gen_commands(args, opt_file_lookup, run_uuid)
    for _, full_cmd in G:
        try:
            status = call(full_cmd, shell=False, cwd=args[CmdArgs.optimizer_root], timeout=timeout)
            if status != 0:
                raise ChildProcessError("status code %d returned from:\n%s" % (status, " ".join(full_cmd)))
        except TimeoutExpired:
            logger.info(f"Experiment timeout after {timeout} seconds.")
            print(json.dumps({"experiment_timeout_exception": " ".join(full_cmd)}))
        counter += 1
    logger.info(f"Benchmark script ran {counter} studies successfully.")
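# A minimal sketch, not part of the original module: it isolates the subprocess-with-timeout
# pattern that `real_run` relies on. `subprocess.call(..., timeout=...)` raises `TimeoutExpired`
# (after killing the child) instead of returning an exit status when the deadline passes, so the
# timeout path must be handled separately from an ordinary non-zero exit. The command used here
# is hypothetical.
def _demo_call_with_timeout(timeout=5):
    from subprocess import TimeoutExpired, call  # local import so the sketch is self-contained

    full_cmd = ["python", "-c", "import time; time.sleep(60)"]  # hypothetical long-running command
    try:
        # Blocks until the child exits or `timeout` seconds elapse; the child is killed on timeout.
        status = call(full_cmd, shell=False, timeout=timeout)
    except TimeoutExpired:
        print("command exceeded %d seconds and was terminated" % timeout)
        return None
    return status  # 0 on success, non-zero exit code on failure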
def main():
    """See README for instructions on calling db_init."""
    description = "Initialize the directories for running the experiments"
    args = cmd.parse_args(cmd.general_parser(description))

    assert not args[CmdArgs.dry_run], "Dry run doesn't make any sense when building dirs"

    logger.setLevel(logging.INFO)  # Note this is the module-wide logger
    if args[CmdArgs.verbose]:
        logger.addHandler(logging.StreamHandler())

    XRSerializer.init_db(args[CmdArgs.db_root], db=args[CmdArgs.db], keys=EXP_VARS, exist_ok=EXIST_OK)

    logger.info("done")
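# A minimal sketch, not part of the original module: the logging pattern `main` uses. Setting the
# level alone is not enough; info-level records are not shown anywhere until a handler is attached,
# so verbose mode simply adds a `StreamHandler`. The logger name below is hypothetical.
def _demo_verbose_logger(verbose=True):
    import logging

    demo_logger = logging.getLogger("db_init_demo")  # hypothetical logger name
    demo_logger.setLevel(logging.INFO)
    if verbose:
        demo_logger.addHandler(logging.StreamHandler())  # echo log records to stderr
    demo_logger.info("done")  # only visible when a handler has been attached
    return demo_logger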
def real_run(args, opt_file_lookup, run_uuid):  # pragma: io
    """Run a sequence of independent experiments to fully run the benchmark.

    This uses `subprocess` to launch a separate process (in serial) for each experiment.

    Parameters
    ----------
    args : dict(CmdArgs, [int, str])
        Arguments of options to pass to the experiments being launched. The keys correspond to the
        same arguments passed to this program.
    opt_file_lookup : dict(str, str)
        Mapping from method name to filename containing the wrapper class for the method.
    run_uuid : uuid.UUID
        UUID for this launcher run. Needed to generate different experiment UUIDs on each call.
        This function is deterministic provided the same `run_uuid`.
    """
    args[CmdArgs.db] = XRSerializer.init_db(args[CmdArgs.db_root], db=args[CmdArgs.db], keys=EXP_VARS, exist_ok=True)
    logger.info("Supply --db %s to append to this experiment or reproduce jobs file." % args[CmdArgs.db])

    # Get and run the commands in a sub-process
    ran, failed = 0, 0
    G = gen_commands(args, opt_file_lookup, run_uuid)
    for _, full_cmd in G:
        status = call(full_cmd, shell=False, cwd=args[CmdArgs.optimizer_root])
        ran += 1
        if status != 0:
            failed += 1
            warnings.warn("status code %d returned from:\n%s" % (status, " ".join(full_cmd)), RuntimeWarning)
    logger.info("%d failures of benchmark script after %d studies." % (failed, ran))
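# A minimal sketch, not part of the original module, of how a caller might drive `real_run`
# directly. In the real program the `args` dict is built by the command-line parser and the
# parser supplies additional options consumed by `gen_commands`; the paths and the
# method-to-wrapper mapping below are hypothetical stand-ins, using only the `CmdArgs` keys that
# appear in the functions above.
def _demo_launch():
    import uuid

    args = {
        CmdArgs.db_root: "./db_root",  # hypothetical output directory for the serialized results
        CmdArgs.db: "demo_db",  # hypothetical experiment database name
        CmdArgs.optimizer_root: "./optimizers",  # hypothetical folder with the optimizer wrappers
    }
    opt_file_lookup = {"RandomSearch": "random_optimizer.py"}  # hypothetical method -> wrapper file
    run_uuid = uuid.uuid4()  # the generated experiment commands are deterministic given this UUID
    real_run(args, opt_file_lookup, run_uuid)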