def load(identifier: str) -> "Problem":
    """Deserialize a previously saved Problem from the store directory.

    Args:
        identifier: filename of the stored problem, as returned by ``save``.

    Returns:
        The restored Problem instance.
    """
    store = os.path.join(get_rootdir(), Problem.store_dir)
    with open(os.path.join(store, identifier), "rb") as fh:
        problem = dill.load(fh)
    # Guard against the file containing something other than a Problem pickle.
    assert isinstance(problem, Problem)
    return problem
def checkpoint(self):
    """Persist this solver's full state as a timestamped checkpoint file.

    Checkpoints go under ``<root>/<checkpoints_folder>/<problem>/``; the
    timestamp format sorts lexicographically, so the latest file is the
    lexicographic maximum.
    """
    folder = os.path.join(
        get_rootdir(), self.checkpoints_folder, str(self.problem)
    )
    os.makedirs(folder, exist_ok=True)
    stamp = datetime.datetime.now().strftime("%y-%m-%d_%H-%M-%S")
    with open(os.path.join(folder, stamp), "wb") as fh:
        dill.dump(self, fh)
def save(self) -> str:
    """Serialize this problem into the store directory.

    On a filename collision a uuid4 suffix is appended so an existing
    stored problem is never overwritten.

    Returns:
        The identifier (filename) the problem was stored under.
    """
    folder = os.path.join(get_rootdir(), self.store_dir)
    os.makedirs(folder, exist_ok=True)

    identifier = str(self)
    if identifier in os.listdir(folder):
        identifier += str(uuid.uuid4())

    with open(os.path.join(folder, identifier), "wb") as fh:
        dill.dump(self, fh)
    print(f"Stored problem as '{identifier}' at {folder}")
    return identifier
def restore_latest(problem: Problem) -> "Solver":
    """Load the most recent checkpoint of a Solver for *problem*.

    Checkpoint filenames are timestamps that sort lexicographically, so the
    maximum filename is the latest checkpoint.

    Raises:
        BlackoptException: if the checkpoint directory does not exist or
            contains no checkpoints.
    """
    directory = os.path.join(
        get_rootdir(), Solver.checkpoints_folder, str(problem)
    )
    try:
        checkpoints = os.listdir(directory)
    except FileNotFoundError as err:
        # Chain the OS error so the traceback shows the underlying cause.
        raise BlackoptException(
            f"The checkpoint directory {directory} doesn't exist."
            f" Were any checkpoints made?"
        ) from err

    if not checkpoints:
        raise BlackoptException(f"No checkpoints found in directory {directory}")

    # max() is O(n) vs. sorting's O(n log n); the result is the same since
    # only the largest (latest) filename is needed.
    latest = max(checkpoints)
    with open(os.path.join(directory, latest), "rb") as fh:
        restored = dill.load(fh)
    assert isinstance(restored, Solver)
    return restored
def generate_report(problem: Problem, metrics: Dict[SolverFactory, Dict[str, Metric]]):
    """Plot one curve group per metric name, aggregated across all solvers.

    Metrics sharing the same name are collected from every solver factory,
    have their first 15% (warmup) discarded, and are plotted together under
    a timestamped report path for *problem*.
    """
    stamp = datetime.datetime.now().strftime("%m-%d_%H-%M-%S")
    report_path = os.path.join(get_rootdir(), "reports", str(problem))

    # Regroup: metric name -> list of Metric objects, one per solver factory.
    grouped = defaultdict(list)
    for metric_map in metrics.values():
        for metric_name, metric in metric_map.items():
            metric.discard_warmup(0.15)
            grouped[metric_name].append(metric)

    for metric_name, series in grouped.items():
        plot_group(
            series,
            f"{report_path}@{stamp}",
            name=metric_name,
            stdev_factor=0.1,
            smoothen=False,
        )
def get_logger(name="blackopt", logdir=LOGDIR, **initial_values):
    """Build a structlog-wrapped logger writing JSON lines to a rotating file.

    If the log file already exists and is non-empty, it is rolled over so
    each run starts with a fresh file. ``initial_values`` become bound
    context on the returned logger.
    """
    log_path = os.path.join(get_rootdir(), logdir, name)
    os.makedirs(os.path.dirname(log_path), exist_ok=True)

    def make_handler(path):
        # One rotating handler, sized so total retained logs stay bounded.
        return RotatingFileHandler(
            path,
            mode="a",
            maxBytes=BACKUP_COUNT * MAX_FILE_SIZE,
            backupCount=BACKUP_COUNT,
        )

    handler = make_handler(log_path)
    if os.path.exists(log_path):
        try:
            if os.stat(log_path).st_size != 0:
                handler.doRollover()
        except FileNotFoundError:
            # Rollover failed. prefer to overwrite old log instead terminating Moonfish.
            handler = make_handler(log_path)

    std_logger = logging.getLogger(name)
    std_logger.addHandler(handler)
    std_logger.setLevel(logging.INFO)

    return wrap_logger(
        std_logger,
        processors=[
            # filter_by_level,
            TimeStamper(fmt="iso"),
            JSONRenderer(sort_keys=True),
        ],
        **initial_values,
    )
def with_tmp_root(tmpdir):
    """Temporarily redirect the package root directory to *tmpdir*.

    Generator-style context: yields with the root swapped, then restores
    the previous root. NOTE(review): presumably wrapped by
    ``@contextlib.contextmanager`` or a pytest fixture decorator outside
    this view — confirm.

    Args:
        tmpdir: path to use as the root directory while active.
    """
    previous = get_rootdir()
    set_rootdir(tmpdir)
    try:
        yield
    finally:
        # Restore even if the consuming body raises, so an exception
        # cannot leak the temporary root into later code/tests.
        set_rootdir(previous)