def test_optimizer_state(self, tmp_path: Path, xor_trial_controller: Callable) -> None:
    """Check that optimizer state survives a checkpoint/restore cycle with Adam.

    Delegates the actual save/restore/compare logic to
    ``utils.optimizer_state_test``, supplying a factory that builds a trial
    controller configured to use the Adam optimizer.
    """

    def build_controller(
        workloads: workload.Stream, load_path: Optional[str] = None
    ) -> det.TrialController:
        # Layer the Adam optimizer choice on top of the base hyperparameters
        # without mutating self.hparams.
        adam_hparams = dict(self.hparams, optimizer="adam")
        return xor_trial_controller(adam_hparams, workloads, load_path=load_path)

    utils.optimizer_state_test(build_controller, tmp_path)
def test_optimizer_state(self, tmp_path: pathlib.Path) -> None:
    """Check that XORTrialOptimizerState's optimizer state round-trips through a checkpoint.

    Hands ``utils.optimizer_state_test`` a factory that instantiates the
    trial controller directly from the trial implementation class.
    """

    def build_controller(
        workloads: workload.Stream, load_path: typing.Optional[str] = None
    ) -> det.TrialController:
        # Build the controller straight from the trial class; the seed keeps
        # runs deterministic so pre-/post-restore states are comparable.
        return utils.make_trial_controller_from_trial_implementation(
            hparams=self.hparams,
            trial_class=pytorch_xor_model.XORTrialOptimizerState,
            workloads=workloads,
            trial_seed=self.trial_seed,
            load_path=load_path,
        )

    utils.optimizer_state_test(build_controller, tmp_path)
def test_optimizer_state(self, tmp_path: Path, xor_trial_controller: Callable) -> None:
    """Check that optimizer state is preserved across checkpoint save/restore.

    Uses the ``xor_trial_controller`` fixture with a fixed seed and 100
    batches per step, deferring the round-trip verification to
    ``utils.optimizer_state_test``.
    """

    def build_controller(
        workloads: workload.Stream, load_path: Optional[str] = None
    ) -> det.TrialController:
        return xor_trial_controller(
            self.hparams,
            workloads,
            batches_per_step=100,
            load_path=load_path,
            trial_seed=self.trial_seed,
        )

    utils.optimizer_state_test(build_controller, tmp_path)