def __init__(self, batch_size, every_n_steps=1, metric_logger=None):
    """Initializer for ExamplesPerSecondCallback.

    Args:
      batch_size: Total batch size across all workers, used to convert
        step counts into examples/second.
      every_n_steps: Log stats every n steps. Defaults to 1.
      metric_logger: Instance of `BenchmarkLogger` used to write the log.
        If None, a `BaseBenchmarkLogger` is created and used.
    """
    # Two independent timestamps: one rolling (reset per logging window),
    # one fixed at training start.
    self._last_recorded_time = time.time()
    self._train_start_time = time.time()

    self._batch_size = batch_size
    self._every_n_steps = every_n_steps
    self._logger = metric_logger or logger.BaseBenchmarkLogger()
    self._global_step = 0  # Initialize it in __init__

    super(ExamplesPerSecondCallback, self).__init__()
def test_log_metric(self):
    """Logging a metric should emit a message with the benchmark prefix."""
    log = logger.BaseBenchmarkLogger()
    log.log_metric("accuracy", 0.999, global_step=1e4, extras={"name": "value"})
    expected_log_prefix = "Benchmark metric:"
    # assertRegexpMatches is deprecated since Python 3.2 and removed in 3.12;
    # assertRegex is the supported equivalent.
    self.assertRegex(str(self.logged_message), expected_log_prefix)
def __init__(self, batch_size, every_n_steps=None, every_n_secs=None,
             warm_steps=0, metric_logger=None):
    """Initializer for ExamplesPerSecondHook.

    Args:
      batch_size: Total batch size across all workers used to calculate
        examples/second from global time.
      every_n_steps: Log stats every n steps.
      every_n_secs: Log stats every n seconds. Exactly one of the
        `every_n_steps` or `every_n_secs` should be set.
      warm_steps: The number of steps to be skipped before logging and running
        average calculation. warm_steps steps refers to global steps across all
        workers, not on each worker
      metric_logger: instance of `BenchmarkLogger`, the benchmark logger that
        hook should use to write the log. If None, BaseBenchmarkLogger will
        be used.

    Raises:
      ValueError: if neither `every_n_steps` or `every_n_secs` is set, or
      both are set.
    """
    # Exactly one of the two triggers must be provided; equality of the two
    # None-checks means both or neither were given.
    steps_unset = every_n_steps is None
    secs_unset = every_n_secs is None
    if steps_unset == secs_unset:
        raise ValueError("exactly one of every_n_steps and every_n_secs should be provided.")

    self._logger = metric_logger or logger.BaseBenchmarkLogger()
    self._timer = tf.estimator.SecondOrStepTimer(
        every_steps=every_n_steps, every_secs=every_n_secs)

    self._step_train_time = 0
    self._total_steps = 0
    self._batch_size = batch_size
    self._warm_steps = warm_steps
    # List of examples per second logged every_n_steps.
    self.current_examples_per_sec_list = []
def __init__(self, metric_logger=None):
    """Initializer for LoggingMetricCallback.

    Args:
      metric_logger: Instance of `BenchmarkLogger` used to write the log.
        If None, a `BaseBenchmarkLogger` is created and used.
    """
    self._logger = metric_logger or logger.BaseBenchmarkLogger()
    # Module-level metric name lists shared by the callback machinery.
    self._per_batch_metrics = _PER_BATCH_METRICS
    self._per_epoch_metrics = _PER_EPOCH_METRICS
    self._global_step = 0  # Initialize it in __init__
    super(LoggingMetricCallback, self).__init__()