""" Callback Extension Example ================================ This example should illustrate how to extend the training using simple callbacks. In particular we will modulate the learning rate with a sawtooth function and clip the gradients by value """ ################################################## # change directories to your needs from inferno.trainers.callbacks.logging.tensorboard import TensorboardLogger from inferno.utils.python_utils import ensure_dir LOG_DIRECTORY = ensure_dir('log/sawtooth') SAVE_DIRECTORY = ensure_dir('save') DATASET_DIRECTORY = ensure_dir('dataset') print("\n \n LOGDIR", LOG_DIRECTORY) ################################################## # shall models be downloaded DOWNLOAD_CIFAR = True USE_CUDA = True ################################################## # Build torch model import torch.nn as nn from inferno.extensions.layers import ConvELU2D from inferno.extensions.layers import Flatten model = nn.Sequential( ConvELU2D(in_channels=3, out_channels=256, kernel_size=3), nn.MaxPool2d(kernel_size=2, stride=2),
from inferno.trainers.callbacks.logging.tensorboard import TensorboardLogger

import torch
import torch.nn as nn
import torch.optim as optim
import torch.utils.data
import numpy as np
import torch.nn.functional as F
import torchvision.transforms as transforms

import warnings

# Silence noisy user warnings emitted by the stack during training.
warnings.filterwarnings("ignore", category=UserWarning)

from inferno.utils.python_utils import ensure_dir

LOG_DIRECTORY = ensure_dir('./logs_2')

BATCHSIZE = 8
N_DIRECTIONS = 8

# Small tensor-manipulation transforms, wrapped so they compose with
# torchvision's transform pipeline.
# unsq = transforms.Lambda(lambda x: torch.unsqueeze(x, 0))
transpose = transforms.Lambda(lambda x: torch.transpose(x, 0, 1))
squeeze = transforms.Lambda(lambda x: torch.squeeze(x, 1))
fromnumpy = transforms.Lambda(lambda x: torch.from_numpy(x))

trans = transforms.Compose([fromnumpy])
trans2 = transforms.Compose([fromnumpy, squeeze])

# NOTE(review): `HDF5VolumeLoader` and `yaml2dict` are not imported anywhere in
# this view — confirm their imports exist elsewhere in the file (they usually
# come from inferno's io utilities).
imageset_train = HDF5VolumeLoader(
    path='./train-volume.h5',
    path_in_h5_dataset='data',
    transforms=trans,
    **yaml2dict('config_train.yml')['slicing_config'])