Example #1
    def test_hparams_smoke(self):
        hp = {'lr': 0.1, 'bsize': 4}
        mt = {'accuracy': 0.1, 'loss': 10}
        summary.hparams(hp, mt)

        hp = {'string': "1b", 'use magic': True}
        summary.hparams(hp, mt)
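For context: `summary.hparams(hp, mt)` builds three protobuf summaries (an experiment tag plus session start and end tags), and several of the examples below write each of them to the event file by hand. A minimal standalone sketch of that pattern, with an illustrative logdir, could be:

# Minimal sketch (assumes tensorboardX is installed; the logdir is arbitrary).
from tensorboardX import SummaryWriter
from tensorboardX.summary import hparams

hp = {'lr': 0.1, 'bsize': 4}
mt = {'accuracy': 0.1, 'loss': 10}

writer = SummaryWriter(logdir='runs/hparams_demo')
exp, ssi, sei = hparams(hp, mt)      # experiment, session start, session end
writer.file_writer.add_summary(exp)
writer.file_writer.add_summary(ssi)
writer.file_writer.add_summary(sei)
for k, v in mt.items():              # scalars back the metrics in the HParams dashboard
    writer.add_scalar(k, v, 0)
writer.close()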
Example #2
    def _try_log_hparams(self, trial: "Trial", result: Dict):
        # TBX currently errors if the hparams value is None.
        flat_params = flatten_dict(trial.evaluated_params)
        scrubbed_params = {
            k: v
            for k, v in flat_params.items()
            if isinstance(v, self.VALID_HPARAMS)
        }

        removed = {
            k: v
            for k, v in flat_params.items()
            if not isinstance(v, self.VALID_HPARAMS)
        }
        if removed:
            logger.info(
                "Removed the following hyperparameter values when "
                "logging to tensorboard: %s", str(removed))

        from tensorboardX.summary import hparams
        try:
            experiment_tag, session_start_tag, session_end_tag = hparams(
                hparam_dict=scrubbed_params, metric_dict=result)
            self._trial_writer[trial].file_writer.add_summary(experiment_tag)
            self._trial_writer[trial].file_writer.add_summary(
                session_start_tag)
            self._trial_writer[trial].file_writer.add_summary(session_end_tag)
        except Exception:
            logger.exception("TensorboardX failed to log hparams. "
                             "This may be due to an unsupported type "
                             "in the hyperparameter values.")
Example #3
    def close(self):
        if self._file_writer is not None:
            if self.trial and self.trial.evaluated_params and self.last_result:
                from tensorboardX.summary import hparams
                experiment_tag, session_start_tag, session_end_tag = hparams(
                    hparam_dict=self.trial.evaluated_params,
                    metric_dict=self.last_result)
                self._file_writer.file_writer.add_summary(experiment_tag)
                self._file_writer.file_writer.add_summary(session_start_tag)
                self._file_writer.file_writer.add_summary(session_end_tag)
            self._file_writer.close()
Example #4
    def log_args(self, args, tag=None):
        if hparams is not None:
            writer = self._writer(tag or '')

            hp, met = self._args2hparams(args)
            exp, ssi, sei = hparams(hp, met)

            writer.file_writer.add_summary(exp)
            writer.file_writer.add_summary(ssi)
            writer.file_writer.add_summary(sei)
            writer.flush()
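`_args2hparams` is not shown in this example. A plausible sketch, assuming it splits an argparse `Namespace` into TensorBoard-friendly hyperparameters and a metric dict (the body below is guessed for illustration, not the original helper):

# Hypothetical stand-in for _args2hparams: keep primitive-valued args as hparams
# and emit a placeholder metric so the HParams plugin has something to display.
def _args2hparams(self, args):
    hp = {k: v for k, v in vars(args).items()
          if isinstance(v, (bool, int, float, str))}
    met = {'hparams/placeholder': 0.0}
    return hp, met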
Example #5
    def _try_log_hparams(self, result):
        # TBX currently errors if the hparams value is None.
        scrubbed_params = {
            k: v
            for k, v in self.trial.evaluated_params.items() if v is not None
        }
        from tensorboardX.summary import hparams
        experiment_tag, session_start_tag, session_end_tag = hparams(
            hparam_dict=scrubbed_params, metric_dict=result)
        self._file_writer.file_writer.add_summary(experiment_tag)
        self._file_writer.file_writer.add_summary(session_start_tag)
        self._file_writer.file_writer.add_summary(session_end_tag)
Example #6
    def add_hparams(self,
                    hparam_dict=None,
                    metric_dict=None,
                    name=None,
                    global_step=None):
        if type(hparam_dict) is not dict or type(metric_dict) is not dict:
            raise TypeError(
                'hparam_dict and metric_dict should be dictionary.')
        exp, ssi, sei = hparams(hparam_dict, metric_dict)
        self.file_writer.add_summary(exp)
        self.file_writer.add_summary(ssi)
        self.file_writer.add_summary(sei)
        for k, v in metric_dict.items():
            self.add_scalar(k, v, global_step)

    def _write_hparams(self, hparam_dict, metric_dict, name):
        # adapted from
        # https://github.com/lanpa/tensorboardX/blob/master/tensorboardX/writer.py#L336-L376
        exp, ssi, sei = hparams(hparam_dict, metric_dict)
        w_hp = SummaryWriter(
            logdir=os.path.join(self._file_writer.logdir, name))
        w_hp.file_writer.add_summary(exp)
        w_hp.file_writer.add_summary(ssi)
        w_hp.file_writer.add_summary(sei)
        for k, values in metric_dict.items():
            global_step = 0
            for v in values:
                w_hp.add_scalar(k, v, global_step)
                global_step += 1
        w_hp.close()
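The `name` sub-directory in `_write_hparams` matters: the hparams session tags and the scalar series need to live under the same run for TensorBoard's HParams dashboard to pair them. A hedged sketch of that per-run layout (paths and values are illustrative):

# Sketch: one child writer per configuration, mirroring _write_hparams above.
import os
from tensorboardX import SummaryWriter
from tensorboardX.summary import hparams

root = 'runs/sweep'
for run_name, hp in [('lr_0.1', {'lr': 0.1}), ('lr_0.01', {'lr': 0.01})]:
    mt = {'accuracy': 0.0}
    exp, ssi, sei = hparams(hp, mt)
    w_hp = SummaryWriter(logdir=os.path.join(root, run_name))
    w_hp.file_writer.add_summary(exp)
    w_hp.file_writer.add_summary(ssi)
    w_hp.file_writer.add_summary(sei)
    w_hp.add_scalar('accuracy', 0.0, 0)
    w_hp.close()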
Example #8
    def test_hparams(self):
        hp = {'lr': 0.1}
        mt = {'accuracy': 0.1}
        compare_proto(summary.hparams(hp, mt), self)

    def test_hparams_string(self):
        hp = {'string_var': "hi"}
        mt = {'accuracy': 0.1}
        compare_proto(summary.hparams(hp, mt), self)

    def test_hparams_bool(self):
        hp = {'bool_var': True}
        mt = {'accuracy': 0.1}
        compare_proto(summary.hparams(hp, mt), self)
Example #11
    def test_hparams_smoke(self):
        hp = {'lr': 0.1, 'bsize': 4}
        mt = {'accuracy': 0.1, 'loss': 10}
        summary.hparams(hp, mt)