from comet_ml import Experiment  # comet_ml must be imported before torch

import hydra
import torch.nn as nn
import torch.optim as optim
from sklearn.datasets import load_iris

from enchanter import tasks
from enchanter.addons import layers


@hydra.main(config_path="config.yaml")  # config path is illustrative
def main(cfg):
    shapes = cfg.model.shapes
    opt_params = cfg.optimizer.params

    experiment = Experiment(log_code=False)
    experiment.set_code(filename=hydra.utils.to_absolute_path(__file__))
    experiment.add_tag("with_hydra")
    experiment.log_parameters({"hydra-cfg": [cfg]})
    model = layers.MLP(shapes)
    optimizer = optim.Adam(model.parameters(), **opt_params)
    runner = tasks.ClassificationRunner(
        model,
        optimizer=optimizer,
        criterion=nn.CrossEntropyLoss(),
        experiment=experiment
    )
    # Data loading was elided in the original snippet; iris data (matching
    # the 4-feature MLPs of the later examples) is assumed here.
    x, y = load_iris(return_X_y=True)
    x, y = x.astype("float32"), y.astype("int64")
    runner.fit(x, y, epochs=10, checkpoint_path="./checkpoints")
    runner.save()


if __name__ == "__main__":
    main()
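
For reference, main() only reads cfg.model.shapes and cfg.optimizer.params, so the
Hydra config it expects is small. A rough sketch of that structure with OmegaConf
(hypothetical values; Hydra would load the equivalent YAML file):

from omegaconf import OmegaConf

# Hypothetical stand-in for the YAML file Hydra would load.
cfg = OmegaConf.create({
    "model": {"shapes": [4, 512, 128, 3]},   # layer widths for layers.MLP
    "optimizer": {"params": {"lr": 1e-3}},   # keyword arguments for optim.Adam
})
print(cfg.model.shapes, dict(cfg.optimizer.params))
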
Example #2
from comet_ml import Experiment

import torch.nn as nn
import torch.optim as optim
from torch.optim.lr_scheduler import ExponentialLR, CosineAnnealingLR

from sklearn.datasets import load_iris

from enchanter import addons
from enchanter import tasks
from enchanter.addons import layers
from enchanter.engine.modules import fix_seed

fix_seed(0)
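# fix_seed(0) seeds the underlying RNGs so that the run is reproducible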

experiment = Experiment()
model = layers.MLP([4, 512, 128, 3], addons.mish)
optimizer = optim.Adam(model.parameters())
runner = tasks.ClassificationRunner(model,
                                    optimizer=optimizer,
                                    criterion=nn.CrossEntropyLoss(),
                                    experiment=experiment,
                                    scheduler=[
                                        CosineAnnealingLR(optimizer,
                                                          T_max=10,
                                                          eta_min=1e-10),
                                        ExponentialLR(optimizer, gamma=0.9),
                                    ])

x, y = load_iris(return_X_y=True)
x = x.astype("float32")
y = y.astype("int64")

runner.fit(x, y, epochs=10)
runner.save()
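
Both schedulers are attached to the same Adam instance, so their effects compose:
assuming enchanter steps each scheduler once per epoch (the usual convention), the
learning rate follows the cosine curve scaled by the 0.9 exponential decay. A
standalone sketch of that combined trace in plain PyTorch:

import torch
import torch.optim as optim
from torch.optim.lr_scheduler import ExponentialLR, CosineAnnealingLR

opt = optim.Adam([torch.nn.Parameter(torch.zeros(1))], lr=1e-3)
cosine = CosineAnnealingLR(opt, T_max=10, eta_min=1e-10)
decay = ExponentialLR(opt, gamma=0.9)

for epoch in range(10):
    opt.step()
    cosine.step()  # cosine annealing toward eta_min
    decay.step()   # multiplicative 0.9 decay on top
    print(epoch, opt.param_groups[0]["lr"])
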
Example #3
import numpy as np
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader

from comet_ml import OfflineExperiment
from sklearn.datasets import load_boston  # note: removed in scikit-learn 1.2
from sklearn.model_selection import train_test_split

from enchanter import tasks
from enchanter.addons import Mish
from enchanter.addons import layers
from enchanter.engine.modules import get_dataset

x, y = load_boston(return_X_y=True)
y = y.reshape(-1, 1)
x_train, x_test, y_train, y_test = train_test_split(x, y, random_state=0)
x_train, x_val, y_train, y_val = train_test_split(x_train,
                                                  y_train,
                                                  random_state=0)

train_ds = get_dataset(x_train.astype(np.float32), y_train.astype(np.float32))
val_ds = get_dataset(x_val.astype(np.float32), y_val.astype(np.float32))
test_ds = get_dataset(x_test.astype(np.float32), y_test.astype(np.float32))
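# get_dataset wraps each (x, y) pair of numpy arrays into a torch-compatible Dataset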

train_loader = DataLoader(train_ds, batch_size=32, shuffle=True)
val_loader = DataLoader(val_ds, batch_size=32, shuffle=False)
test_loader = DataLoader(test_ds, batch_size=32, shuffle=False)

model = layers.MLP([13, 512, 128, 1], Mish())
optimizer = optim.Adam(model.parameters())


def test_regression_1():
    runner = tasks.RegressionRunner(
        model, optimizer, nn.MSELoss(),
        OfflineExperiment(offline_directory="./logs", display_summary_level=0))
    runner.add_loader("train", train_loader) \
          .add_loader("val", val_loader) \
          .add_loader("test", test_loader)
    runner.train_config(epochs=1)

    try:
        runner.run(verbose=True)
        is_pass = True
    except Exception:
        is_pass = False

    assert is_pass
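
The chained add_loader calls work because add_loader returns the runner itself. A
minimal sketch of that fluent pattern (hypothetical class, not enchanter's
implementation):

class LoaderRegistry:
    def __init__(self):
        self.loaders = {}

    def add_loader(self, mode, loader):
        self.loaders[mode] = loader
        return self  # returning self is what makes the calls chainable

reg = LoaderRegistry().add_loader("train", [0, 1]).add_loader("val", [2])
assert set(reg.loaders) == {"train", "val"}
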
Example #4
import torch
import torch.nn as nn
import torch.optim as optim
from comet_ml import Optimizer
from sklearn.datasets import load_iris
import enchanter.tasks as tasks
import enchanter.addons as addons       # pylint: disable=W0611
import enchanter.addons.layers as layers
from enchanter.utils import comet

config = comet.TunerConfigGenerator(
    algorithm="bayes",
    metric="train_avg_loss",
    objective="minimize",
    seed=0,
    trials=5
)

config.suggest_categorical("activation", ["addons.mish", "torch.relu", "torch.sigmoid"])

opt = Optimizer(config.generate())

for experiment in opt.get_experiments():
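    # eval() maps the suggested name (e.g. "torch.relu") back to a callable,
    # which is why the torch/addons imports above must stay in scope.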
    model = layers.MLP([4, 512, 128, 3], eval(experiment.get_parameter("activation")))
    optimizer = optim.Adam(model.parameters())
    runner = tasks.ClassificationRunner(
        model, optimizer=optimizer, criterion=nn.CrossEntropyLoss(), experiment=experiment
    )
    x, y = load_iris(return_X_y=True)
    x = x.astype("float32")
    y = y.astype("int64")

    runner.fit(x, y, epochs=1, batch_size=32)
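
Because the search space is a fixed whitelist, the eval() call can be swapped for an
explicit lookup table so that no strings are evaluated. A small sketch (ACTIVATIONS
and resolve_activation are illustrative names, not part of enchanter):

import torch
import enchanter.addons as addons

# Explicit mapping from the tuner's categorical choices to callables.
ACTIVATIONS = {
    "addons.mish": addons.mish,
    "torch.relu": torch.relu,
    "torch.sigmoid": torch.sigmoid,
}

def resolve_activation(name):
    return ACTIVATIONS[name]  # KeyError for anything outside the search space

assert resolve_activation("torch.relu") is torch.relu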