def start_train(self, global_step: int, session: tf.Session):
    """
    ## Start experiment

    Either resume from a saved checkpoint or reset all state,
    depending on whether `global_step` indicates a run in progress.
    """
    self.trial.start_step = global_step
    self._log_trial(is_add=True)
    self._log_python_file()

    if global_step <= 0:
        # Fresh run: wipe any leftover summaries/checkpoints and
        # initialize every TensorFlow variable from scratch.
        with self.logger.monitor("Clearing summaries"):
            self.clear_summaries()
        with self.logger.monitor("Clearing checkpoints"):
            self.clear_checkpoints()
        with self.logger.monitor("Initializing variables"):
            tf_util.init_variables(session)
    else:
        # Resuming mid-run: restore variables from the numpy checkpoint
        # and record whether the restore succeeded on the monitor.
        with self.logger.monitor("Loading checkpoint") as monitor:
            monitor.is_successful = self.load_checkpoint_numpy(session)

    self.create_writer(session)
def start_train(self, session: tf.Session, is_init: bool = True):
    """
    ## Start experiment

    If `is_init` is False, attempt to restore the latest checkpoint and
    resume from its step; otherwise (or if the restore fails) start from
    step 0 by clearing summaries/checkpoints and initializing variables.
    """
    global_step = 0
    if not is_init:
        # Resuming: try to restore the checkpoint; only advance the
        # start step if the restore actually succeeded.
        # (The dead `as m` binding from the original is dropped.)
        with self.logger.section("Loading checkpoint"):
            is_successful = self.__checkpoint_saver.load(session)
            self.logger.set_successful(is_successful)
            if is_successful:
                global_step = self.__checkpoint_saver.max_step
    self.trial.start_step = global_step
    self._start()
    if global_step == 0:
        # Starting from scratch (fresh run, or checkpoint restore failed):
        # clear previous state and initialize all variables.
        with self.logger.section("Clearing summaries"):
            self.clear_summaries()
        with self.logger.section("Clearing checkpoints"):
            self.clear_checkpoints()
        with self.logger.section("Initializing variables"):
            tf_util.init_variables(session)
    self.create_writer(session)
def load_checkpoint(self, session: tf.Session):
    """
    Load the latest TensorFlow checkpoint.

    Returns True on a successful restore; on failure, falls back to
    initializing all variables and returns False.
    """
    if _load_checkpoint(session, self.checkpoint_path):
        return True
    # No usable checkpoint: leave the session in a runnable state anyway.
    tf_util.init_variables(session)
    return False
def load_checkpoint(self, session: tf.Session):
    """
    ## Load the latest TensorFlow checkpoint

    **Use numpy array saving.** It's simpler and you can easily load
    subsets of variables, or even manually swap variables between
    experiments with just file copies to try things out.

    Returns True on a successful restore; on failure, falls back to
    initializing all variables and returns False.
    """
    if _load_checkpoint(session, str(self.info.checkpoint_path)):
        return True
    # Restore failed: initialize variables so the session is still usable.
    tf_util.init_variables(session)
    return False
def start(self, global_step: int, session: tf.Session):
    """
    Start by either loading a checkpoint or resetting,
    based on `global_step`.
    """
    if global_step <= 0:
        # Fresh start: clear stale summaries/checkpoints and
        # initialize every variable.
        with self.logger.monitor("Clearing summaries"):
            self.clear_summaries()
        with self.logger.monitor("Clearing checkpoints"):
            self.clear_checkpoints()
        with self.logger.monitor("Initializing variables"):
            tf_util.init_variables(session)
    else:
        # Mid-run resume: restore from checkpoint and record the outcome.
        with self.logger.monitor("Loading checkpoint") as monitor:
            monitor.is_successful = self.load_checkpoint(session)

    self.create_writer(session)