# Example 1
def test_checker_model_1(setup):
    """Verify Checker's model, init_state, and final_state property contracts.

    Covers: empty state on a fresh checker, type validation on assignment
    (non-module values must raise ``TypeError``), and state-dict capture.
    """
    checker = Checker(setup['path'])

    # A fresh checker exposes no model information yet.
    assert checker.model is None
    assert checker.describe is None
    assert checker.model_structure is None
    with pytest.raises(TypeError):
        checker.model = None

    checker.model = setup['model']
    assert isinstance(checker.model, LinearLayer)
    assert isinstance(checker.model_structure, str)
    assert 'LinearLayer' in checker.model_structure
    assert str(checker.model) == str(setup['model'])

    # Assigning a model captures its initial state dict.
    assert checker.init_state is not None
    with pytest.raises(TypeError):
        checker.init_state = None
    with pytest.raises(TypeError):
        checker.init_state = OrderedDict(a=1)
    assert isinstance(checker.init_state, OrderedDict)

    assert checker.final_state is None
    with pytest.raises(TypeError):
        checker.final_state = None
    with pytest.raises(TypeError):
        # Fixed: originally re-assigned init_state (copy-paste slip), leaving
        # the final_state setter's type validation untested.
        checker.final_state = OrderedDict(a=1)

    checker.final_state = setup['model'].state_dict()
    assert isinstance(checker.final_state, OrderedDict)
# Example 2
def test_checker_from_cp(setup):
    """Round-trip a checkpoint and a stored value through save and Checker.load."""
    saved = Checker(setup['path'])
    saved_path = saved.path
    saved.set_checkpoint(test_cp=setup['cp'])
    saved(a=1)

    # Reload from disk and confirm stored values survived.
    reloaded = Checker.load(saved_path)
    assert reloaded['a'] == 1

    checkpoint = reloaded.checkpoints['test_cp']
    assert 'b' in checkpoint
    assert checkpoint['model_state'] == setup['cp']['model_state']
# Example 3
def test_checker_path(setup):
    """Check path/model_name resolution: default cwd, explicit path, incremented path."""
    # Default: checker is rooted at the current working directory's name.
    default_checker = Checker()
    assert default_checker.path == str(Path('.').resolve() / Path.cwd().name)
    assert default_checker.model_name == Path(os.getcwd()).name

    # Explicit path: used verbatim.
    named_checker = Checker(setup['path'])
    assert named_checker.path == str(Path(setup['path']))
    assert named_checker.model_name == setup['name']

    # increment=True appends an '@<n>' suffix to avoid collisions.
    incremented = Checker(setup['path'], increment=True)
    assert incremented.path == str(Path(setup['path'] + '@1'))
    assert incremented.model_name == setup['name'] + '@1'
# Example 4
    def load(
        cls,
        from_: Union[str, Path, Checker],
        *,
        loss_func: torch.nn.Module = None,
        optimizer: BaseOptimizer = None,
        lr_scheduler: BaseLRScheduler = None,
        clip_grad: Union[ClipNorm, ClipValue] = None,
        epochs: int = 200,
        cuda: Union[bool, str, torch.device] = False,
        non_blocking: bool = False,
    ) -> 'Trainer':
        """
        Load a model from a local path or a :class:`xenonpy.model.training.Checker`.

        Parameters
        ----------
        from_
            Path to the model dir or :class:`xenonpy.model.training.Checker` object.
        loss_func
            Loss function.
        optimizer
            Optimizer for model parameters tuning.
        lr_scheduler
            Learning rate scheduler.
        clip_grad
            Clip grad before each optimize.
        epochs
            Number of iterations.
        cuda
            Set training device(s).
        non_blocking
            When non_blocking is ``True``,
            it tries to convert/move asynchronously with respect to the host if possible.

        Returns
        -------
        Trainer
            A trainer restored from the saved model directory.
        """
        # Normalize the source into a Checker instance.
        if isinstance(from_, (str, Path)):
            checker = Checker(from_)
        else:
            checker = from_
        # An empty directory cannot be a saved model.
        if len(checker.files) == 0:
            raise RuntimeError(f'{checker.path} is not a model dir')

        trainer = cls(
            model=checker.model,
            cuda=cuda,
            loss_func=loss_func,
            optimizer=optimizer,
            lr_scheduler=lr_scheduler,
            clip_grad=clip_grad,
            epochs=epochs,
            non_blocking=non_blocking,
        )
        # Restore the recorded training history.
        trainer._training_info = checker.training_info.to_dict(orient='records')
        # Restore saved checkpoints, if any were written to disk.
        if Path(checker.path + '/checkpoints').is_dir():
            for name in checker.checkpoints.files:
                trainer._checkpoints[name] = cls.checkpoint_tuple(
                    **checker.checkpoints[name])
        return trainer
# Example 5
 def before_proc(self, trainer: Trainer) -> None:
     """Create the Checker, persist the model, and record run metadata."""
     self._checker = Checker(self._path, increment=self._increment)

     # Persist the model class and constructor params only when provided.
     if self._model_class is not None:
         self._checker(model_class=self._model_class)
     if self._model_params is not None:
         self._checker(model_params=self._model_params)
     self._checker.model = trainer.model

     # Environment snapshot; 'finish' and 'time_elapsed' are placeholders
     # that are presumably filled in at the end of the run — TODO confirm.
     description = {
         'python': py_ver,
         'system': sys_ver(),
         'numpy': np.__version__,
         'torch': torch.__version__,
         'xenonpy': __version__,
         'device': str(trainer.device),
         'start': datetime.now().strftime('%Y/%m/%d %H:%M:%S'),
         'finish': 'N/A',
         'time_elapsed': 'N/A',
     }
     # Caller-supplied entries override the defaults above.
     description.update(self._describe)
     self._describe_ = description
     self._checker(describe=self._describe_)
# Example 6
def test_checker_call(setup):
    """Missing keys read as None; set_checkpoint creates the checkpoints dir."""
    checker = Checker(setup['path'])
    # Unknown keys return None instead of raising.
    assert checker['no exists'] is None
    checker.set_checkpoint(**setup['cp'])
    checkpoint_dir = Path(checker.path) / 'checkpoints'
    assert checkpoint_dir.exists()