Example No. 1
    def test_rewind_initialization(self, dataset_name, model_name, init_schema,
                                   optimizer_name, tmp_path):
        """Tests that a rewounded model matches the one found in root_path.
        Gets the first parameter of the model, modifies it manually, and then
        tries to rewind it.
        """
        experiment_dir = tmp_path / 'experiment'
        experiment = Experiment(directory=experiment_dir)

        # create a new experiment state
        initial_state = experiment.spawn_new_tree(
            dataset_name=dataset_name,
            model_name=model_name,
            init_schema=init_schema,
            optimizer_name=optimizer_name,
            seed=3,
        ).get()

        # get the first parameter of the model at initialization
        param_name, param = next(initial_state.model.named_parameters())
        init_param = param.clone()

        # reset the parameter manually and check it's different
        _, param = next(initial_state.model.named_parameters())
        torch.nn.init.constant_(param, val=3.0)
        assert not torch.equal(init_param, param)

        # rewind initialization and check the param went back to its original
        # state
        initial_state.experiment_object.root = initial_state  # mock
        rewind_initialization(param, param_name, initial_state)
        _, rewound_param = next(initial_state.model.named_parameters())
        assert torch.equal(init_param, rewound_param)
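
For reference, a minimal sketch of what rewind_initialization appears to do based on this test: reload the saved root state and copy its matching parameter back into the live tensor. The function body, names, and import path below are assumptions, not the project's actual implementation.

import torch

from dag import ExperimentState  # import path assumed


def rewind_initialization_sketch(param, param_name, state):
    # Hypothetical sketch: reload the saved root state from disk and copy its
    # matching parameter back into the live parameter tensor.
    root_state = ExperimentState.load(state.experiment_object.root.path)
    source = dict(root_state.model.named_parameters())[param_name]
    with torch.no_grad():
        param.copy_(source)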
Example No. 2
def test_spawn_new_tree(dataset_name, model_name, init_schema, optimizer_name,
                        tmp_path):
    # create a new experiment tree and check that the directory is created
    experiment_dir = tmp_path / 'experiment'
    experiment = Experiment(directory=experiment_dir)
    assert experiment_dir.is_dir()

    initial_state = experiment.spawn_new_tree(
        dataset_name=dataset_name,
        model_name=model_name,
        init_schema=init_schema,
        optimizer_name=optimizer_name,
        seed=1,
    ).get()

    # ensure that the state was created and saved to disk
    assert experiment.root.path
    assert experiment.root.path.parent == experiment_dir
    assert experiment.root.path.exists()

    # check that reloading works and that the loaded model matches the
    # initial model layer by layer
    loaded = ExperimentState.load(experiment.root.path)

    for layer, params in initial_state.model.state_dict().items():
        assert (params == loaded.model.state_dict()[layer]).all()

    # Check that the optimizers match. The comparison is awkward because the
    # optimizer's state_dict stores the IDs of the parameters it acts on, and
    # those IDs do not remain identical across a copy of the experiment.
    for key, item in initial_state.optimizer.state_dict().items():
        if key == "param_groups":
            for id_, param_group in enumerate(item):
                for param_name, param_val in param_group.items():
                    if param_name == "params":
                        assert len(param_val) == len(
                            loaded.optimizer.state_dict()["param_groups"][id_]
                            ["params"])
                    else:
                        assert (param_val == loaded.optimizer.state_dict()
                                ["param_groups"][id_][param_name])
        else:
            assert item == loaded.optimizer.state_dict()[key]
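
The optimizer comparison above could be factored into a reusable helper. A sketch under the same assumption stated in the test, namely that parameter IDs inside param_groups are not stable across a copy, so only their count is compared:

def assert_optimizer_states_match(original, reloaded):
    orig_sd, new_sd = original.state_dict(), reloaded.state_dict()
    for key, item in orig_sd.items():
        if key != "param_groups":
            assert item == new_sd[key]
            continue
        for group, new_group in zip(item, new_sd["param_groups"]):
            for name, value in group.items():
                if name == "params":
                    # parameter IDs differ across copies; compare counts only
                    assert len(value) == len(new_group["params"])
                else:
                    assert value == new_group[name]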
Example No. 3
    def test_recipe_seed(self, dataset_name, model_name, init_schema,
                         optimizer_name, tmp_path):
        experiment_dir = tmp_path / 'experiment'
        experiment = Experiment(directory=experiment_dir)

        initial_state = experiment.spawn_new_tree(
            dataset_name=dataset_name,
            model_name=model_name,
            init_schema=init_schema,
            optimizer_name=optimizer_name,
            seed=999,
        )
        assert initial_state.get().seed == 999

        r = Recipe(train={"n_epochs": 1})
        assert r.seed is None

        new_state = r(initial_state)
        assert r.seed == initial_state.get().seed
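
A hypothetical illustration of the behaviour asserted here, not the project's Recipe class: a recipe created without a seed of its own adopts the seed of the state it is applied to.

class RecipeSeedSketch:
    # Hypothetical sketch, not the project's Recipe implementation.
    def __init__(self, seed=None, **steps):
        self.seed = seed
        self.steps = steps

    def __call__(self, state):
        # Inherit the seed from the incoming state when none was given.
        if self.seed is None:
            self.seed = state.get().seed
        return state  # the real Recipe would produce a new ExperimentState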
Example No. 4
def test_reload_state_by_recipe(dataset_name, model_name, init_schema,
                                optimizer_name, tmp_path):
    # create a new experiment tree and check that the directory is created
    experiment_dir = tmp_path / 'experiment'
    experiment = Experiment(directory=experiment_dir)
    assert experiment_dir.is_dir()

    initial_state = experiment.spawn_new_tree(
        dataset_name=dataset_name,
        model_name=model_name,
        init_schema=init_schema,
        optimizer_name=optimizer_name,
        seed=0,
    ).get()

    # ensure that the state was created and saved to disk
    assert experiment.root.path
    assert experiment.root.path.parent == experiment_dir
    assert experiment.root.path.exists()

    # reload by specifying same recipe
    loaded_experiment = Experiment(directory=experiment_dir)
    loaded_state = loaded_experiment.spawn_new_tree(
        dataset_name=dataset_name,
        model_name=model_name,
        init_schema=init_schema,
        optimizer_name=optimizer_name,
        seed=0,
    ).get()

    assert initial_state.filename() == loaded_state.filename()
    assert loaded_state.from_cache
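
A sketch of the cache lookup this test exercises, assuming states are stored under a deterministic filename inside the experiment directory; the helper name, the from_cache handling, and the import path are assumptions.

import os

from dag import ExperimentState  # import path assumed


def maybe_load_cached_state(directory, filename):
    # Hypothetical sketch: if a state with the same deterministic filename
    # already exists on disk, load it and mark it as coming from the cache
    # instead of re-initializing.
    path = os.path.join(directory, filename)
    if os.path.exists(path):
        state = ExperimentState.load(path)
        state.from_cache = True
        return state
    return None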
Example No. 5
def test_seed_in_spawn(dataset_name, model_name, init_schema, optimizer_name,
                       tmp_path):
    """Test that two different random seeds actually produce two different
    ExperimentStates and that these can be differentiated using their
    unique identifier contained in their `filename`. This implies that the
    seed is being correctly taken into consideration when producing the hash.
    """
    experiment_dir = tmp_path / 'experiment'
    experiment = Experiment(directory=experiment_dir)

    initial_state = experiment.spawn_new_tree(
        dataset_name=dataset_name,
        model_name=model_name,
        init_schema=init_schema,
        optimizer_name=optimizer_name,
        seed=0,
    ).get()

    experiment_dir2 = tmp_path / 'experiment22'
    experiment2 = Experiment(directory=experiment_dir2)

    initial_state2 = experiment2.spawn_new_tree(
        dataset_name=dataset_name,
        model_name=model_name,
        init_schema=init_schema,
        optimizer_name=optimizer_name,
        seed=1,
    ).get()

    assert initial_state.filename() != initial_state2.filename()
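
The docstring's claim can be pictured as hashing everything that defines the initial state, seed included; a minimal sketch of such an identifier, which is not the project's actual hashing code:

import hashlib
import json


def state_identifier_sketch(dataset_name, model_name, init_schema,
                            optimizer_name, seed):
    # Illustration only: two specs that differ only by seed hash to
    # different identifiers, which is the property the test checks.
    spec = dict(dataset_name=dataset_name, model_name=model_name,
                init_schema=init_schema, optimizer_name=optimizer_name,
                seed=seed)
    return hashlib.sha256(json.dumps(spec, sort_keys=True).encode()).hexdigest()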
Example No. 6
def test_no_double_spawn(dataset_name, model_name, init_schema, optimizer_name,
                         tmp_path):
    """Test that a tree (Experiment) cannot have two roots.
    """
    experiment_dir = tmp_path / 'experiment'
    experiment = Experiment(directory=experiment_dir)

    initial_state = experiment.spawn_new_tree(
        dataset_name=dataset_name,
        model_name=model_name,
        init_schema=init_schema,
        optimizer_name=optimizer_name,
        seed=0,
    ).get()

    # test that you can't add a new root to the same folder
    with pytest.raises(RuntimeError):
        second_initial_state = experiment.spawn_new_tree(
            dataset_name=dataset_name,
            model_name=model_name,
            init_schema=init_schema,
            optimizer_name=optimizer_name,
            seed=1,
        ).get()
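
The guard this test expects could look roughly like the following check at the top of spawn_new_tree; this is a hypothetical sketch, not the project's code.

def ensure_no_root(experiment):
    # Hypothetical guard: an Experiment is a tree and can only have one root.
    if getattr(experiment, "root", None) is not None:
        raise RuntimeError("This Experiment already has a root state.")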
Example No. 7
    def test_zhou_initialization(self, dataset_name, model_name, init_schema,
                                 optimizer_name, tmp_path):
        """Tests zhou initialization of model
        """
        experiment_dir = tmp_path / 'experiment'
        experiment = Experiment(directory=experiment_dir)

        # create a new experiment state
        initial_state = experiment.spawn_new_tree(
            dataset_name=dataset_name,
            model_name=model_name,
            init_schema=init_schema,
            optimizer_name=optimizer_name,
            seed=2,
        ).get()

        with open(init_schema) as schema_file:
            init_schema = eval(schema_file.read())

        parameter_names = np.unique(
            [".".join((k1, k2)) for k1, k2 in init_schema.keys()])
        params = [
            get_matching_parameter(param_name, initial_state.model)
            for param_name in parameter_names
        ]
        const_sign = [
            param.std().item() * torch.sign(param.data) for param in params
        ]

        initial_state.experiment_object.root = initial_state  # mock

        for param_name, param, const_layer in zip(parameter_names, params,
                                                  const_sign):
            zhou_initialization(param, param_name, initial_state)
            assert torch.equal(
                const_layer,
                get_matching_parameter(param_name, initial_state.model))
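
Based on what the test asserts, Zhou-style reinitialization replaces every weight with the layer's standard deviation times the sign of the weight stored at initialization. A hedged sketch, reusing get_matching_parameter from the test above; this is not the project's implementation.

import torch


def zhou_initialization_sketch(param, param_name, state):
    # Hypothetical sketch: constant-magnitude, sign-preserving
    # reinitialization, std(w0) * sign(w0), where w0 is the parameter stored
    # in the experiment's root state.
    root_param = get_matching_parameter(param_name,
                                        state.experiment_object.root.model)
    with torch.no_grad():
        param.copy_(root_param.std() * torch.sign(root_param))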
Example No. 8
import sys
sys.path.append("..")

import logging
from dag import Experiment, Recipe
import dill
import os
import utils

logger = logging.getLogger("main")
utils.setup_logging(debug=True)

directory = "../output/08-06-19_seed1"
experiment = Experiment(directory=directory)

# this materializes immediately
x = experiment.spawn_new_tree(
    dataset_name="mnist",
    model_name="models.LeNet",
    init_schema="",
    optimizer_name="sgd",
    seed=1,
)

x = Recipe(train={"n_epochs": 30})(x)

for _ in range(20):
    # prune according to the structured-random schema
    pruned = Recipe(
        prune_schema="../schemas/pruning_schema_lenet_structuredrandom.py",
    )(x)
Example No. 9
import sys
sys.path.append("..")

import logging
from dag import Experiment, Recipe
import dill
import os
import utils

logger = logging.getLogger("main")
utils.setup_logging(debug=True)

directory = "../output/alexnet_08-22-19_seed0"
experiment = Experiment(directory=directory)

# this materializes immediately
x = experiment.spawn_new_tree(
    dataset_name="cifar-10",
    model_name="torchvision.models.AlexNet",
    init_schema="",
    optimizer_name="sgd",
    seed=0,
)

x = Recipe(
    train={"n_epochs": 30}
)(x)

for _ in range(20):
    pruned = Recipe(
        # alternative: "../schemas/pruning_schema_lenet_unstructuredl1.py"
        prune_schema="../schemas/pruning_schema_alexnet_unstructuredl1.py",
    )(x)
Example No. 10
import logging

import torch

from dag import Experiment, Recipe
from utils import setup_logging

setup_logging(debug=True)

experiment = Experiment(directory="./end-to-end-experiment")

# This materializes immediately
initial_state = experiment.spawn_new_tree(
    dataset_name="mnist",
    model_name="models.LeNet",
    init_schema="schemas/init_kaiminguniform_lenet.py",
    optimizer_name="sgd",
    seed=0,
    device=torch.device("cpu"),
)

# These are computed lazily until .run() is called. A Recipe represents a
# way to transition from one state (read: model) to the next via pruning,
# finetuning, and reinitializing.
state1 = Recipe(
    train={"n_epochs": 1},
    prune_schema="schemas/pruning_schema_lenet_unstructuredl1.py",
)(initial_state)

state2 = Recipe(reinit_schema="schemas/reinit_schema_lenet.py")(state1)
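
Per the comment above, nothing has executed yet; a hypothetical way to trigger the chain, assuming run() is exposed on the final state. The exact call site is an assumption; the comment only says that .run() materializes the lazy computation.

# Materialize the lazy chain of recipes (call site assumed).
final_state = state2.run()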