示例#1
0
def initialize_trainer_provider(task, trainer_opts, provider_opts, _run):
    """Build the Trainer and data provider for a given experiment task.

    On a fresh run ('train' / 'create_checkpoint') the resolved options are
    dumped to ``<storage_dir>/init.json``; if that file already exists, only
    'restart' and 'validate' are accepted so an existing experiment is not
    silently overwritten.

    Args:
        task: One of 'train', 'create_checkpoint', 'restart', 'validate'.
        trainer_opts: Config dict for ``Trainer``; must contain 'storage_dir'.
        provider_opts: Config dict for the data provider.
        _run: Sacred run object, used to print the resolved config.

    Returns:
        Tuple of ``(trainer, provider)``.

    Raises:
        ValueError: If ``task`` is unknown for a fresh storage_dir.
    """
    storage_dir = Path(trainer_opts['storage_dir'])
    init_file = storage_dir / 'init.json'

    if init_file.exists():
        # Experiment already initialized: only resuming/evaluating is allowed.
        assert task in ['restart', 'validate'], task
    elif task in ['train', 'create_checkpoint']:
        # First run: persist the fully-resolved options for reproducibility.
        options = dict(
            trainer_opts=recursive_class_to_str(trainer_opts),
            provider_opts=recursive_class_to_str(provider_opts),
        )
        dump_json(options, init_file)
    else:
        raise ValueError(task, storage_dir)

    sacred.commands.print_config(_run)

    trainer = Trainer.from_config(trainer_opts)
    assert isinstance(trainer, Trainer)
    provider = config_to_instance(provider_opts)
    return trainer, provider
示例#2
0
def write_makefile_and_config(
        storage_dir,
        _config,
        _run,
        backend='yaml',
        write_config=True,
        write_makefile=True,
):
    """Write a config file and a Makefile into ``storage_dir``.

    The Makefile gets a ``resume`` target that re-launches the main module
    with the dumped config, so the experiment can be resumed with ``make``.

    Args:
        storage_dir: Directory in which the files are created.
        _config: Sacred config dict to serialize.
        _run: Sacred run object (not used in the body; kept for the
            sacred captured-function signature).
        backend: Serialization format for the config, 'json' or 'yaml'.
        write_config: If True, dump the config file.
        write_makefile: If True, write the Makefile.

    Raises:
        ValueError: If ``backend`` is neither 'json' nor 'yaml'.
    """
    from padertorch.configurable import recursive_class_to_str

    root = Path(storage_dir)

    if backend == 'json':
        config_path = root / "config.json"
        if write_config:
            # JSON cannot represent class objects, so stringify them first.
            pb.io.dump_json(recursive_class_to_str(_config), config_path)
    elif backend == 'yaml':
        config_path = root / "config.yaml"
        if write_config:
            pb.io.dump_yaml(_config, config_path)
    else:
        raise ValueError(backend)

    if write_makefile:
        module_name = pt.configurable.resolve_main_python_path()
        (root / "Makefile").write_text(
            "resume:\n"
            f"\tpython -m {module_name} "
            f"resume with {config_path.name}\n"
        )