# Example no. 1
# 0
# Persist the run options alongside the logs for reproducibility.
# 'with' guarantees the handle is closed even if write() raises
# (the original leaked the handle on error).
with open(os.path.join(logdir, 'opts.txt'), 'wt') as opts_file:
    opts_file.write(str(opt))

# Deliberately left open: losses are appended line-by-line during training.
loss_outfile = open(os.path.join(logdir, 'losses.jsonl'), 'wt')

# Load dataset, using a pickle cache to skip the slow folder scan.
# NOTE(review): pickle.load is unsafe on untrusted files -- 'celeba.pickle'
# is assumed to be produced locally by this very script.
if os.path.isfile('celeba.pickle'):
    print('loading dataset cached')
    with open('celeba.pickle', 'rb') as cache_in:
        dataset = pickle.load(cache_in)
else:
    print('loading dataset new')
    # ToTensor maps pixels to [0, 1]; Normalize(0.5, 0.5) rescales to [-1, 1].
    dataset = ImageFolder(root=opt.datadir,
                          transform=transforms.Compose([
                              transforms.ToTensor(),
                              transforms.Normalize((0.5, 0.5, 0.5),
                                                   (0.5, 0.5, 0.5)),
                          ]))
    print('dumping dataset to celeba.pickle')
    with open('celeba.pickle', 'wb') as cache_out:
        pickle.dump(dataset, cache_out)

dataloader = torch.utils.data.DataLoader(dataset,
                                         batch_size=opt.batchsize,
                                         shuffle=True,
                                         num_workers=2)  # int(2) was a no-op cast

# Seed every RNG in play so runs are reproducible; draw one if none was given.
if opt.manualSeed is None:
    opt.manualSeed = random.randint(1, 10000)
print("Random Seed: ", opt.manualSeed)
random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)
# Example no. 2
# 0
        # Tail of a function whose def is above this chunk (presumably an
        # inception-score computation over 'splits' of predictions).
        scores = []
        for i in range(part.shape[0]):
            pyx = part[i, :]  # predicted class distribution for sample i
            # entropy(pk, qk) with two arguments is the relative entropy
            # KL(pyx || py) -- presumably scipy.stats.entropy; confirm import.
            scores.append(entropy(pyx, py))
        # exp(mean KL) is the per-split Inception Score.
        split_scores.append(np.exp(np.mean(scores)))

    # Mean and standard deviation of the score across all splits.
    return np.mean(split_scores), np.std(split_scores)


if __name__ == '__main__':
    # NOTE(review): ImageFolder is constructed with no arguments here;
    # torchvision's ImageFolder requires a root directory, so this likely
    # relies on a custom ImageFolder with defaults -- confirm.
    # (A commented-out IgnoreLabelDataset wrapper that stripped labels
    # was removed here; restore it if label-free iteration is needed.)
    dataset = ImageFolder()

    print("Calculating Inception Score...")
    score = inception_score(dataset,
                            cuda=True,
                            batch_size=32,
                            resize=True,
                            splits=10)
    print(score)
# Example no. 3
# 0
                             # NOTE: dict literal continues from above this chunk
                             convert_to_rgb(),  # presumably forces 3-channel input -- confirm helper
                             transforms.ToTensor(),
                             # Standard ImageNet per-channel means / stds
                             transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
                             ]),
    # Validation pipeline for the "art" domain: fixed resize, no augmentation.
    'art_Val' : transforms.Compose([
        transforms.Resize((256, 256)),
        convert_to_rgb(),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ])
}
batch_size = 8
artLen = 300

data_dir = "../../../SwimData/GeoCodes/classification2"
# One ImageFolder per split; the on-disk directory is the part before "_"
# ('art' or 'real'), while the transform is keyed by the full split name.
image_datasets = {x: ImageFolder(os.path.join(data_dir, x.split("_")[0]),
                                          data_transforms[x])
                  for x in ['art_Train', 'art_Val', 'real_Train', 'real_Val']}
realMaster = image_datasets['real_Train']

dataset_sizes = {x: len(image_datasets[x]) for x in ['art_Train', 'art_Val', 
                                                     'real_Train', 'real_Val']}

class_names = image_datasets['art_Train'].classes
print('Types of classes:',class_names)

# Per-class sample counts for each split. np.bincount is one O(n) pass over
# the targets, replacing the original O(n * num_classes) of calling
# list.count once per class; .tolist() keeps plain-int values as before.
classCounts = {x: np.bincount(image_datasets[x].targets,
                              minlength=len(class_names)).tolist()
               for x in ['art_Train', 'art_Val', 'real_Train', 'real_Val']}

# Inverse-frequency weight per sample (presumably for a
# WeightedRandomSampler -- confirm against the consumer). Vectorized:
# the per-class inverse-count table is built once and indexed by each
# sample's target, instead of being rebuilt inside the comprehension
# for every sample as the original did.
sampleWeights = {x: (1.0 / np.array(classCounts[x]))[image_datasets[x].targets]
                 for x in ['art_Train', 'art_Val', 'real_Train', 'real_Val']}
import os
from imagefolder import ImageFolder

# Make sure the destination directory for the tar archives exists.
if not os.path.exists('images_dataset_tar'):
    os.makedirs('images_dataset_tar')

# (source folder list, destination archive) pairs, all processed identically.
_ARCHIVE_JOBS = [
    ([
        './images_dataset_ds/train/011k07', './images_dataset_ds/train/015x4r',
        './images_dataset_ds/train/01bqk0', './images_dataset_ds/train/01jfm_',
        './images_dataset_ds/train/01s105'
    ], './images_dataset_tar/train_images_1.tar.gz'),
    ([
        './images_dataset_ds/val/011k07', './images_dataset_ds/val/015x4r',
        './images_dataset_ds/val/01bqk0', './images_dataset_ds/val/01jfm_',
        './images_dataset_ds/val/01s105'
    ], './images_dataset_tar/val_images_1.tar.gz'),
    ([
        './images_dataset_ds/train/021sj1', './images_dataset_ds/train/02d9qx',
        './images_dataset_ds/train/02s195', './images_dataset_ds/train/034c16',
        './images_dataset_ds/train/03l9g'
    ], './images_dataset_tar/train_images_2.tar.gz'),
]

# Build each ImageFolder and pack it into its archive, in the same order
# as the original three copy-pasted stanzas.
for folder_list, tar_path in _ARCHIVE_JOBS:
    im = ImageFolder(folder_list)
    im.to_tar(tar_path)

im = ImageFolder([  # NOTE(review): argument list is truncated at this chunk boundary
# Example no. 5
# 0
        # NOTE: dict literal continues from above this chunk.
        # Standard ImageNet per-channel means / stds.
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ]),
    # Validation pipeline for the "art" domain: deterministic resize only.
    'artVal':
    transforms.Compose([
        transforms.Resize((256, 256)),
        convert_to_rgb(),  # presumably forces 3-channel input -- confirm helper
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ])
}
batch_size = 8
artLen = 1500

data_dir = "../../../SwimData/GeoCodes/classifier4"

# The four splits share one loading recipe, so name the list once.
_SPLITS = ['realTrain', 'realVal', "artTrain", "artVal"]

# One ImageFolder per split; directory name and transform key coincide here.
image_datasets = {
    split: ImageFolder(os.path.join(data_dir, split), data_transforms[split])
    for split in _SPLITS
}
dataset_sizes = {split: len(ds) for split, ds in image_datasets.items()}

class_names = image_datasets['realTrain'].classes
print('Types of classes:', class_names)

# Per-class sample counts for each split
# (dict comprehension continues past this chunk).
classCounts = {
    x: [
        # occurrences of each class index in this split's target list
        image_datasets[x].targets.count(Class)
        for Class in np.arange(len(class_names))
    ]