Code Example #1
 def __init__(self,
              logdir: str,
              fps=1,
              window=10,
              exp_start_time: float = None):
     """
     :param logdir: path to the directory to which all file-storing methods write their files;
         the placeholder {t} is automatically replaced by the start time of the training
     :param fps: the rate at which the logger prints data to the console (frames per second)
     :param window: the memory depth of the log data, i.e. the logger
         maintains only the window most recent batches in the .log method. At the end of an
         epoch the mean of those batches is additionally saved.
     :param exp_start_time: start time of the overall training, used only for
         replacing {t} in logdir; defaults to the start time of this logger
     """
     self.start = int(time.time())
     self.exp_start_time = self.start if exp_start_time is None else exp_start_time
     self.dir = logdir.replace('{t}', time_format(self.exp_start_time))
     if not pt.exists(os.path.dirname(self.dir)):
         os.makedirs(os.path.dirname(self.dir))
     self.t = time.time()
     self.fps = fps
     self.history = defaultdict(DefaultList)
     self.history['err_all'] = CircleList(window)
     self.__window = window
     self.__lastepoch = 0
     self.__further_keys = []
     self.config_outfile = None
     self.logtxtfile = None
     self.loggingtxt = ''
     self.printlog = ''
     self.__warnings = []
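
The constructor above relies on helpers that are not shown in these excerpts: pt is presumably os.path imported under an alias, time_format turns a timestamp into a directory-safe string, and CircleList / DefaultList are custom containers. As a minimal, self-contained sketch of the logdir handling alone, assuming a hypothetical time_format based on time.strftime (the format string is a guess, not taken from the original code):

    import os
    import time

    def time_format(t: float) -> str:
        # Hypothetical stand-in for the time_format helper used above;
        # the actual format string is not given in these excerpts.
        return time.strftime('%Y%m%d%H%M%S', time.localtime(t))

    exp_start_time = time.time()
    logdir = 'runs/{t}/logs/log.txt'

    # Resolve the {t} placeholder and create the parent directory, mirroring
    # the constructor above; exist_ok=True replaces the explicit exists check.
    resolved = logdir.replace('{t}', time_format(exp_start_time))
    os.makedirs(os.path.dirname(resolved), exist_ok=True)
    print(resolved)
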
Code Example #2
 def log_info(self, info: dict, epoch: int = None):
     """
     Logs a dictionary of metrics (unique name -> scalar Tensor value) in CircleLists.
     Does not compute an average at the end of an epoch; that is done in the .log method.
     :param info: dictionary of metrics that are to be maintained, such as the loss.
     :param epoch: current epoch
     """
     for k, v in info.items():
         if k not in self.__further_keys:
             if '{}_all'.format(k) in self.history:
                 raise ValueError(
                     '{} is already part of the history.'.format(k))
             self.history['{}_all'.format(k)] = CircleList(self.__window)
             self.__further_keys.append(k)
         self.history['{}_all'.format(k)].append(v.data.item())
         if epoch is not None:
             self.history[k][epoch] = np.mean(
                 self.history['{}_all'.format(k)])
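
From the way log_info uses it, CircleList appears to keep only the most recent window-many values, while the per-epoch history accepts assignment at an arbitrary epoch index. A rough, self-contained stand-in for this update pattern, using collections.deque for the windowed buffer (all names below are illustrative, not from the original code):

    import numpy as np
    from collections import deque

    window = 10
    # Stand-in for CircleList: keeps only the window most recent batch values.
    history_all = deque(maxlen=window)
    # Per-epoch running means, keyed by epoch (the original stores these in a
    # DefaultList indexed by epoch).
    epoch_means = {}

    for epoch in range(2):
        for batch_loss in (0.9, 0.8, 0.7):   # dummy per-batch metric values
            history_all.append(batch_loss)
            # log_info stores the mean over the windowed buffer under the
            # current epoch on every call.
            epoch_means[epoch] = float(np.mean(history_all))

    print(epoch_means)
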
Code Example #3
 def reset(self, logdir: str = None, exp_start_time: float = None):
     """
     Resets all stored information and sets a new start time.
     :param logdir: sets a new logdir; defaults to None, which keeps the old one
     :param exp_start_time: start time of the overall training, used only for
         replacing {t} in logdir (if logdir is not None); defaults to the old start time
     """
     self.start = int(time.time())
     self.exp_start_time = self.exp_start_time if exp_start_time is None else exp_start_time
     self.t = time.time()
     self.history = defaultdict(DefaultList)
     self.history['err_all'] = CircleList(self.__window)
     self.history['err'] = DefaultList()
     self.__lastepoch = 0
     self.__further_keys = []
     self.logtxtfile = None
     self.loggingtxt = ''
     self.log_prints()
     self.__warnings = []
     if logdir is not None:
         self.dir = logdir.replace('{t}', time_format(self.exp_start_time))
         if not pt.exists(os.path.dirname(self.dir)):
             os.makedirs(os.path.dirname(self.dir))
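
DefaultList itself is likewise not shown, but the assignment history[k][epoch] = ... in log_info together with defaultdict(DefaultList) in __init__ and reset suggests a list that grows on demand when assigned past its end. A hypothetical sketch of such a container (a guess at the behaviour, not the original implementation):

    from collections import defaultdict

    class DefaultListSketch(list):
        # Hypothetical stand-in: pads itself with None so that assignment
        # at an arbitrary (non-negative) index always succeeds.
        def __setitem__(self, index, value):
            while len(self) <= index:
                self.append(None)
            super().__setitem__(index, value)

    history = defaultdict(DefaultListSketch)
    history['err'][3] = 0.42      # works although 'err' was never initialised
    print(history['err'])         # [None, None, None, 0.42]
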