def get_dataset(self, dataset, pin_memory=True, drop_last=True, ratio=0.2):
        """
        Init train-, testset and train-, testloader for experiment.

        :param dataset: string that describe which dataset to use for training. Current Options: "mnist", "cifar"
        :param pin_memory: If True, the data loader will copy tensors into CUDA pinned memory before returning them
        :param drop_last: If true, drop the last incomplete batch, if the dataset is not divisible by the batch size
        :param ratio: Only necessary if dataset is "artset". Ratio of train- and testset by which Painter by Numbers
        dataset should be divided by.
        :return: None
        """
        if dataset == "mnist":
            self.trainset, self.testset, self.classes = dl.load_mnist()
            self.trainloader = dl.get_loader(self.trainset, self.batch_size,
                                             pin_memory, drop_last)
            self.testloader = dl.get_loader(self.testset, self.batch_size,
                                            pin_memory, drop_last)
        elif dataset == "cifar":
            self.trainset, self.testset, self.classes = dl.load_cifar()
            self.trainloader = dl.get_loader(self.trainset, self.batch_size,
                                             pin_memory, drop_last)
            self.testloader = dl.get_loader(self.testset, self.batch_size,
                                            pin_memory, drop_last)
        elif dataset == "artset":
            self.dataset, self.classes = dl.load_artset(
                self.use_genre, self.subset)
            self.trainloader, self.testloader = dl.split_dataset(
                self.dataset, ratio, self.batch_size, pin_memory, drop_last)
        else:
            print("The requested dataset is not implemented yet.")
Example 2
    def get_dataset(self, dataset, pin_memory=True, drop_last=True):
        """
        Init train-, testset and train-, testloader for experiment. Furthermore criterion will be initialized.

        :param dataset: string that describe which dataset to use for training. Current Options: "mnist", "cifar"
        :param pin_memory: If True, the data loader will copy tensors into CUDA pinned memory before returning them
        :param drop_last: If true, drop the last incomplete batch, if the dataset is not divisible by the batch size
        """
        if dataset == "mnist":
            self.trainset, self.testset, self.classes = dl.load_mnist()
            self.trainloader = dl.get_loader(self.trainset, self.batch_size, pin_memory, drop_last)
            self.testloader = dl.get_loader(self.testset, self.batch_size, pin_memory, drop_last)
        elif dataset == "cifar":
            self.trainset, self.testset, self.classes = dl.load_cifar()
            self.trainloader = dl.get_loader(self.trainset, self.batch_size, pin_memory, drop_last)
            self.testloader = dl.get_loader(self.testset, self.batch_size, pin_memory, drop_last)
        elif dataset == "artset":
            self.dataset, self.classes = dl.load_artset(self.use_genre, self.subset)
            self.trainloader, self.testloader = dl.split_dataset(self.dataset, 0.2, self.batch_size, pin_memory,
                                                                 drop_last)
        else:
            raise ValueError("The requested dataset is not implemented yet.")

        # probe the model with one batch to record the shape of its latent output
        img, _ = next(iter(self.trainloader))
        img = img.to(self.device)
        lat_img = self.model(img)
        self.lat_shape = lat_img.shape

        # initialize the INN loss as training criterion
        self.num_classes = len(self.classes)
        self.criterion = il.INN_loss(self.num_classes, self.sigma, self.device, self.batch_size,
                                     self.likelihood, self.classification, self.zero_pad, self.conditional)
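
The forward pass at the end of this variant only serves to read off the latent shape before the criterion is built. A minimal, self-contained sketch of that probe with a toy stand-in model and a dummy batch (both are assumptions for illustration, not the repository's INN):

import torch
import torch.nn as nn

model = nn.Sequential(nn.Flatten(), nn.Linear(3 * 32 * 32, 64))  # toy stand-in for the INN
batch = torch.randn(8, 3, 32, 32)        # one dummy CIFAR-shaped batch
with torch.no_grad():
    lat_shape = model(batch).shape       # shape of the latent output, here torch.Size([8, 64])
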
Example 3
    def get_dataset(self, dataset='imagenet', pin_memory=True, num_workers=8):
        """
        Init train-/testset and train-/testloader for the experiment. Furthermore, the generative classifier (INN)
        and its learning rate scheduler will be initialized.

        :param dataset: string describing which dataset to use for training. Current options: "imagenet", "cifar"
        :param pin_memory: If True, the data loader will copy tensors into CUDA pinned memory before returning them
        :param num_workers: number of worker processes used by the data loaders
        """
        print()
        print("Loading Dataset: {}".format(dataset))
        self.num_workers = num_workers

        if dataset == "imagenet":
            self.dataset, self.classes = dl.load_imagenet()
            self.trainloader, self.testloader = dl.split_dataset(
                self.dataset,
                0.2,
                self.batch_size,
                pin_memory,
                num_workers=num_workers)
        elif dataset == "cifar":
            self.trainset, self.testset, self.classes = dl.load_cifar()
            self.trainloader = dl.get_loader(self.trainset,
                                             self.batch_size,
                                             pin_memory,
                                             shuffle=True,
                                             num_workers=num_workers)
            self.testloader = dl.get_loader(self.testset,
                                            self.batch_size,
                                            pin_memory,
                                            shuffle=False,
                                            num_workers=num_workers)
        else:
            raise ValueError("The requested dataset is not implemented yet. "
                             "Possible options are: imagenet and cifar.")

        self.num_classes = len(self.classes)

        # initialize the generative classifier (INN) and a step-decay learning rate scheduler
        self.inn = gc.GenerativeClassifier(init_latent_scale=self.mu_init,
                                           lr=self.lr_init,
                                           dims=self.dims,
                                           n_classes=self.num_classes,
                                           use_vgg=self.use_vgg).to(self.device)
        self.scheduler = torch.optim.lr_scheduler.MultiStepLR(
            self.inn.optimizer, milestones=self.milestones, gamma=0.1)

        print("Finished!")
Example 4
    def get_dataset(self, dataset, pin_memory=True, drop_last=True):
        """
        Init train-, testset and train-, testloader for experiment.

        :param dataset: string that describe which dataset to use for training. Current Options: "mnist", "cifar"
        :param pin_memory: If True, the data loader will copy tensors into CUDA pinned memory before returning them
        :param drop_last: If true, drop the last incomplete batch, if the dataset is not divisible by the batch size
        """
        if dataset == "mnist":
            self.trainset, self.testset, self.classes = dl.load_mnist()
            self.trainloader = dl.get_loader(self.trainset, self.batch_size, pin_memory, drop_last)
            self.testloader = dl.get_loader(self.testset, self.batch_size, pin_memory, drop_last)
        elif dataset == "cifar":
            self.trainset, self.testset, self.classes = dl.load_cifar()
            self.trainloader = dl.get_loader(self.trainset, self.batch_size, pin_memory, drop_last)
            self.testloader = dl.get_loader(self.testset, self.batch_size, pin_memory, drop_last)
        else:
            print("The requested dataset is not implemented yet.")
Example 5
import torch.nn as nn
from functionalities import dataloader as dl
from functionalities import evaluater as ev
from functionalities import filemanager as fm
from functionalities import trainer as tr
from functionalities import plot as p
from architecture import RotNet as RN

trainset, testset, classes = dl.load_cifar("./datasets")
trainloader, validloader, testloader = dl.make_dataloaders(
    trainset, testset, 128)

criterion = nn.CrossEntropyLoss()

# set rot classes
rot_classes = ['original', '90 rotation', '180 rotation', '270 rotation']

# initialize network
net_block3 = RN.RotNet(num_classes=4, num_conv_block=3, add_avg_pool=False)

# train network on the rotation pretext task (predict 0/90/180/270 degree rotations)
rot_block3_loss_log, _, rot_block3_test_accuracy_log, _, _ = tr.adaptive_learning(
    [0.1, 0.02, 0.004, 0.0008], [60, 120, 160, 200],
    0.9,
    5e-4,
    net_block3,
    criterion,
    trainloader,
    None,
    testloader,
    rot=['90', '180', '270'])
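
The rot argument and the four rot_classes suggest the network is trained on the rotation pretext task: each image is rotated by 0/90/180/270 degrees and the network predicts which rotation was applied. A self-contained sketch of building such rotated batches and labels in plain PyTorch (the helper below is an illustration, not the repository's trainer code):

import torch

def make_rotation_batch(images):
    """Return the 0/90/180/270-degree rotated copies of `images` and their rotation labels."""
    rotated = [torch.rot90(images, k, dims=(2, 3)) for k in range(4)]
    labels = torch.arange(4).repeat_interleave(images.size(0))
    return torch.cat(rotated, dim=0), labels

imgs = torch.randn(2, 3, 32, 32)
rot_imgs, rot_labels = make_rotation_batch(imgs)   # shapes: (8, 3, 32, 32) and (8,)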