Example #1
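Builds the GAN: it instantiates the discriminator and the AdaIN-based generator, moves both to the GPU, creates their optimizers when running in train mode, optionally restores a pretrained checkpoint, and prints both network summaries when verbose.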
    def build_model(self):
        # Define a generator and a discriminator
        from models import Discriminator
        from models import AdaInGEN as Generator
        self.count = 0
        self.D = Discriminator(
            self.config, debug=self.config.mode == 'train' and self.verbose)
        self.D = to_cuda(self.D)
        self.G = Generator(
            self.config, debug=self.config.mode == 'train' and self.verbose)
        self.G = to_cuda(self.G)

        if self.config.mode == 'train':
            self.d_optimizer = self.set_optimizer(
                self.D, self.config.d_lr, self.config.beta1, self.config.beta2)
            self.g_optimizer = self.set_optimizer(
                self.G, self.config.g_lr, self.config.beta1, self.config.beta2)

        # Optionally resume from a pretrained checkpoint
        if self.config.pretrained_model and self.verbose:
            self.load_pretrained_model()

        if self.config.mode == 'train' and self.verbose:
            self.print_network(self.D, 'Discriminator')
            self.print_network(self.G, 'Generator')
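A minimal sketch of how build_model might be reached, assuming a Solver-style wrapper class; only the config fields used above (mode, d_lr, g_lr, beta1, beta2, pretrained_model) come from the example, the driver itself is hypothetical:

    # Hypothetical driver; build_model is the method shown above.
    class Solver(object):
        def __init__(self, config, data_loader, verbose=True):
            self.config = config
            self.data_loader = data_loader
            self.verbose = verbose
            self.build_model()  # creates self.G, self.D and their optimizers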
Example #2
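Computes the Inception Score of the real images served by the data loader: Inception v3 posteriors are grouped by dataset label, each image's posterior is compared (via KL divergence) against its group's marginal, and per-label plus overall scores are written to scores/Inception_Real.txt.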
    def INCEPTION_REAL(self):
        from misc.utils import load_inception
        from scipy.stats import entropy
        net = load_inception()
        net = to_cuda(net)
        net.eval()
        inception_up = nn.Upsample(size=(299, 299), mode='bilinear')
        mode = 'Real'
        data_loader = self.data_loader
        file_name = 'scores/Inception_{}.txt'.format(mode)

        PRED_IS = {i: [] for i in range(len(data_loader.dataset.labels[0]))}
        IS = {i: [] for i in range(len(data_loader.dataset.labels[0]))}

        for i, (real_x, org_c, files) in tqdm(
                enumerate(data_loader),
                desc='Calculating CIS/IS - {}'.format(file_name),
                total=len(data_loader)):
            # class index of the (single) sample in the batch
            label = torch.max(org_c, 1)[1][0]
            # rescale images from [-1, 1] to [0, 1] before Inception
            real_x = to_var((real_x + 1) / 2., volatile=True)
            pred = to_data(F.softmax(net(inception_up(real_x)), dim=1),
                           cpu=True).numpy()
            PRED_IS[int(label)].append(pred)

        for label in range(len(data_loader.dataset.labels[0])):
            PRED_IS[label] = np.concatenate(PRED_IS[label], 0)
            # prior is computed from all outputs
            py = np.sum(PRED_IS[label], axis=0)
            for j in range(PRED_IS[label].shape[0]):
                pyx = PRED_IS[label][j, :]
                IS[label].append(entropy(pyx, py))

        total_is = []
        file_ = open(file_name, 'w')  # assumes the scores/ directory exists
        for label in range(len(data_loader.dataset.labels[0])):
            _is = np.exp(np.mean(IS[label]))
            total_is.append(_is)
            PRINT(file_, "Label {}".format(label))
            PRINT(file_, "Inception Score: {:.4f}".format(_is))
        PRINT(file_, "")
        PRINT(
            file_, "[TOTAL] Inception Score: {:.4f} +/- {:.4f}".format(
                np.array(total_is).mean(),
                np.array(total_is).std()))
        file_.close()
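For reference, the quantity written above is the standard Inception Score, IS = exp(E_x[KL(p(y|x) || p(y))]). A self-contained sketch of the same computation on made-up predictions; note that scipy.stats.entropy(pk, qk) is the KL divergence and normalizes both arguments, which is why the unnormalized np.sum prior above works:

    import numpy as np
    from scipy.stats import entropy

    preds = np.random.dirichlet(np.ones(1000), size=64)  # fake softmax rows
    py = preds.sum(axis=0)  # (unnormalized) marginal p(y), as above
    kl = [entropy(preds[j], py) for j in range(preds.shape[0])]
    print(np.exp(np.mean(kl)))  # Inception Score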
Example #3
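Fine-tunes an ImageNet-pretrained Inception v3 as a RafD expression classifier: the final fully connected layer is replaced to match the RafD label set, the network is trained with RMSprop and cross-entropy, test accuracy is reported every epoch, and a checkpoint is saved per epoch.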
def train_inception(batch_size, shuffling=False, num_workers=4, **kwargs):

    import os

    from torchvision.models import inception_v3
    from misc.utils import to_var, to_cuda, to_data
    from torchvision import transforms
    from torch.utils.data import DataLoader
    from PIL import Image
    import numpy as np
    import torch.nn.functional as F
    import torch
    import torch.nn as nn
    import tqdm

    metadata_path = os.path.join('data', 'RafD', 'normal')
    # Inception v3 expects 299x299 inputs
    image_size = 299

    transform = []
    window = image_size // 10  # pad by 10% before the random crop
    transform += [
        transforms.Resize((image_size + window, image_size + window),
                          interpolation=Image.ANTIALIAS)
    ]
    transform += [
        transforms.RandomResizedCrop(image_size,
                                     scale=(0.7, 1.0),
                                     ratio=(0.8, 1.2))
    ]
    transform += [transforms.RandomHorizontalFlip()]
    transform += [transforms.ToTensor()]
    transform = transforms.Compose(transform)

    # RafD is the dataset class defined elsewhere in this project
    dataset_train = RafD(image_size,
                         metadata_path,
                         transform,
                         'train',
                         shuffling=True,
                         **kwargs)
    dataset_test = RafD(image_size,
                        metadata_path,
                        transform,
                        'test',
                        shuffling=False,
                        **kwargs)

    # shuffle=False on purpose: the training set is reshuffled manually
    # via dataset.shuffle(epoch) at the end of every epoch
    train_loader = DataLoader(dataset=dataset_train,
                              batch_size=batch_size,
                              shuffle=False,
                              num_workers=num_workers)
    test_loader = DataLoader(dataset=dataset_test,
                             batch_size=batch_size,
                             shuffle=False,
                             num_workers=num_workers)

    num_labels = len(train_loader.dataset.labels[0])
    n_epochs = 100
    net = inception_v3(pretrained=True, transform_input=True)
    net.aux_logits = False
    # replace the final fully connected layer to match the RafD label set
    num_ftrs = net.fc.in_features
    net.fc = nn.Linear(num_ftrs, num_labels)

    net_save = metadata_path + '/inception_v3/{}.pth'
    if not os.path.isdir(os.path.dirname(net_save)):
        os.makedirs(os.path.dirname(net_save))
    print("Model will be saved at: " + net_save)
    optimizer = torch.optim.RMSprop(net.parameters(), lr=1e-5)
    to_cuda(net)

    for epoch in range(n_epochs):
        LOSS = {'train': [], 'test': []}
        OUTPUT = {'train': [], 'test': []}
        LABEL = {'train': [], 'test': []}

        # evaluate on the test split before each training epoch
        net.eval()
        for i, (data, label,
                files) in tqdm.tqdm(enumerate(test_loader),
                                    total=len(test_loader),
                                    desc='Validating Inception V3 | RafD'):
            data = to_var(data, volatile=True)
            label = to_var(torch.max(label, dim=1)[1], volatile=True)
            out = net(data)
            loss = F.cross_entropy(out, label)
            LOSS['test'].append(to_data(loss, cpu=True)[0])
            OUTPUT['test'].extend(
                to_data(F.softmax(out, dim=1).max(1)[1], cpu=True).tolist())
            LABEL['test'].extend(to_data(label, cpu=True).tolist())
        acc_test = (np.array(OUTPUT['test']) == np.array(LABEL['test'])).mean()
        print('[Test] Loss: {:.4f} Acc: {:.4f}'.format(
            np.array(LOSS['test']).mean(), acc_test))

        net.train()
        for i, (data, label, files) in tqdm.tqdm(
                enumerate(train_loader),
                total=len(train_loader),
                desc='[{}/{}] Train Inception V3 | RafD'.format(
                    str(epoch).zfill(5),
                    str(n_epochs).zfill(5))):
            data = to_var(data)
            label = to_var(torch.max(label, dim=1)[1])
            out = net(data)
            loss = F.cross_entropy(out, label)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            LOSS['train'].append(to_data(loss, cpu=True)[0])
            OUTPUT['train'].extend(
                to_data(F.softmax(out, dim=1).max(1)[1], cpu=True).tolist())
            LABEL['train'].extend(to_data(label, cpu=True).tolist())

        acc_train = (np.array(OUTPUT['train']) == np.array(
            LABEL['train'])).mean()
        print('[Train] Loss: {:.4f} Acc: {:.4f}'.format(
            np.array(LOSS['train']).mean(), acc_train))
        torch.save(net.state_dict(), net_save.format(str(epoch).zfill(5)))
        train_loader.dataset.shuffle(epoch)
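Only the batch size is required; the dataset path and every hyperparameter are hard-coded inside the function. A usage sketch (the **kwargs that RafD accepts are not shown in this example, so none are passed here):

    if __name__ == '__main__':
        train_inception(batch_size=16, num_workers=4)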
Example #4
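The generated-image counterpart of Example #2: every real image is translated to each other label with n_styles = 20 random style codes, and both the Inception Score (prior over all outputs for a label) and the conditional Inception Score (prior over the outputs of a single input) are written to scores/Inception_SMIT.txt.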
    def INCEPTION(self):
        from misc.utils import load_inception
        from scipy.stats import entropy
        n_styles = 20
        net = load_inception()
        net = to_cuda(net)
        net.eval()
        self.G.eval()
        inception_up = nn.Upsample(size=(299, 299), mode='bilinear')
        mode = 'SMIT'
        data_loader = self.data_loader
        file_name = 'scores/Inception_{}.txt'.format(mode)

        PRED_IS = {i: [] for i in range(len(data_loader.dataset.labels[0]))}
        CIS = {i: [] for i in range(len(data_loader.dataset.labels[0]))}
        IS = {i: [] for i in range(len(data_loader.dataset.labels[0]))}

        for i, (real_x, org_c, files) in tqdm(
                enumerate(data_loader),
                desc='Calculating CIS/IS - {}'.format(file_name),
                total=len(data_loader)):
            PRED_CIS = {
                i: []
                for i in range(len(data_loader.dataset.labels[0]))
            }
            org_label = torch.max(org_c, 1)[1][0]
            # replicate the input image once per style sample
            real_x = real_x.repeat(n_styles, 1, 1, 1)
            real_x = to_var(real_x, volatile=True)

            target_c = (org_c * 0).repeat(n_styles, 1)
            target_c = to_var(target_c, volatile=True)
            for label in range(len(data_loader.dataset.labels[0])):
                if org_label == label:
                    continue
                target_c *= 0
                target_c[:, label] = 1
                style = to_var(self.G.random_style(n_styles),
                               volatile=True) if mode == 'SMIT' else None

                fake = (self.G(real_x, target_c, style)[0] + 1) / 2

                pred = to_data(F.softmax(net(inception_up(fake)), dim=1),
                               cpu=True).numpy()
                PRED_CIS[label].append(pred)
                PRED_IS[label].append(pred)

                # CIS: prior computed from the outputs of this single input
                PRED_CIS[label] = np.concatenate(PRED_CIS[label], 0)
                py = np.sum(PRED_CIS[label], axis=0)
                for j in range(PRED_CIS[label].shape[0]):
                    pyx = PRED_CIS[label][j, :]
                    CIS[label].append(entropy(pyx, py))

        for label in range(len(data_loader.dataset.labels[0])):
            PRED_IS[label] = np.concatenate(PRED_IS[label], 0)
            py = np.sum(PRED_IS[label],
                        axis=0)  # prior is computed from all outputs
            for j in range(PRED_IS[label].shape[0]):
                pyx = PRED_IS[label][j, :]
                IS[label].append(entropy(pyx, py))

        total_cis = []
        total_is = []
        file_ = open(file_name, 'w')
        for label in range(len(data_loader.dataset.labels[0])):
            cis = np.exp(np.mean(CIS[label]))
            total_cis.append(cis)
            _is = np.exp(np.mean(IS[label]))
            total_is.append(_is)
            PRINT(file_, "Label {}".format(label))
            PRINT(file_, "Inception Score: {:.4f}".format(_is))
            PRINT(file_, "conditional Inception Score: {:.4f}".format(cis))
        PRINT(file_, "")
        PRINT(
            file_, "[TOTAL] Inception Score: {:.4f} +/- {:.4f}".format(
                np.array(total_is).mean(),
                np.array(total_is).std()))
        PRINT(
            file_,
            "[TOTAL] conditional Inception Score: {:.4f} +/- {:.4f}".format(
                np.array(total_cis).mean(),
                np.array(total_cis).std()))
        file_.close()
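The only difference between the two scores is the prior in the KL term: IS pools predictions over all outputs for a target label, measuring diversity across the whole translated set, while CIS builds the prior from the n_styles outputs of a single input, rewarding style diversity for a fixed image. A minimal side-by-side sketch with made-up predictions:

    import numpy as np
    from scipy.stats import entropy

    n_inputs, n_styles, n_classes = 16, 20, 8
    # fake softmax outputs: one row per (input, style) pair
    preds = np.random.dirichlet(np.ones(n_classes), size=(n_inputs, n_styles))

    # CIS: one prior per input image, averaged over all inputs and styles
    cis = np.exp(np.mean([entropy(p, preds[i].sum(0))
                          for i in range(n_inputs) for p in preds[i]]))
    # IS: a single prior pooled over every output
    flat = preds.reshape(-1, n_classes)
    is_ = np.exp(np.mean([entropy(p, flat.sum(0)) for p in flat]))
    print(cis, is_)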