# Example no. 1
# 0
            transforms.Resize(opt.img_size),
            transforms.ToTensor(),
            transforms.Normalize([0.5], [0.5])
        ]),
    ),
    batch_size=opt.batch_size,
    shuffle=True,
)

# Build the MNIST-M training loader (domain B), creating the data
# directory first if it does not exist yet.
os.makedirs("../../data/mnistm", exist_ok=True)

# Preprocessing pipeline: resize to the configured image size, convert to
# a tensor, then map each RGB channel from [0, 1] to [-1, 1].
mnistm_transform = transforms.Compose([
    transforms.Resize(opt.img_size),
    transforms.ToTensor(),
    transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
])

dataloader_B = torch.utils.data.DataLoader(
    MNISTM("../../data/mnistm",
           train=True,
           download=True,
           transform=mnistm_transform),
    batch_size=opt.batch_size,
    shuffle=True,
)

# Optimizers
# The generator and the classifier are updated jointly: their parameters are
# chained into a single Adam optimizer so both share the same learning rate
# and beta schedule (opt.b1, opt.b2).
optimizer_G = torch.optim.Adam(itertools.chain(generator.parameters(),
                                               classifier.parameters()),
                               lr=opt.lr,
                               betas=(opt.b1, opt.b2))
optimizer_D = torch.optim.Adam(discriminator.parameters(),
                               lr=opt.lr,
        acc /= 10000.0
        '''
        return j, acc


# Instantiate the model and, when GPU use is enabled, move it to the
# device selected by GPU_Num.
net = FunctionJ()
if Use_GPU:
    net = net.cuda(GPU_Num)
# NOTE(review): `opt` here appears to be an alias for torch.optim (unlike the
# argparse-style `opt` in the other snippets) — confirm against the imports.
optimizer = opt.Adam(net.parameters(), lr=Learning_Rate, betas=(0.9, 0.99))
epoch = 0
# Images are only converted to tensors; no resizing or normalization here.
transform = transforms.Compose([transforms.ToTensor()])
# dataset_train = datasets.MNIST(root='/media/data/zhaoyin/', transform=transform)
# dataset_test = datasets.MNIST(root='/media/data/zhaoyin/', train=False, transform=transform)
mnist_root = '/media/data/zhaoyin'
root = '/media/data/zhaoyin/mnistm'
# MNIST-M train/test splits; `mnist_root` points at the plain MNIST data the
# MNISTM dataset builds on.
dataset_train = MNISTM(root=root, mnist_root=mnist_root, transform=transform)
dataset_test = MNISTM(root=root,
                      mnist_root=mnist_root,
                      train=False,
                      transform=transform)

# Running history of per-epoch loss and accuracy values.
loss_ls = []
acc_ls = []


def isfinish(li):
    """Return True when the series `li` has flattened out.

    Convergence criterion: the total absolute step-to-step change across
    the series is smaller than 5% (1/20) of the final value.

    Args:
        li: sequence of numbers (e.g. per-epoch accuracies).

    Returns:
        bool: True if converged, False otherwise (also False for an
        empty sequence, which previously raised IndexError).
    """
    if not li:
        return False
    # Sum of |li[t+1] - li[t]| over consecutive pairs.
    total_change = sum(abs(b - a) for a, b in zip(li, li[1:]))
    # Explicit False instead of the original implicit None.
    return total_change < li[-1] / 20
# Example no. 3
# 0
    train=True,
    download=True,
    transform=transforms.Compose([
        transforms.Resize(opt.img_size),
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    ])),
                                           batch_size=opt.batch_size,
                                           shuffle=True)

# Ensure the MNIST-M data directory exists, then build a shuffled
# training loader over it.
os.makedirs('../../data/mnistm', exist_ok=True)
dataloader_B = torch.utils.data.DataLoader(
    MNISTM(
        '../../data/mnistm',
        train=True,
        download=True,
        # Resize, convert to tensor, and map each RGB channel to [-1, 1].
        transform=transforms.Compose([
            transforms.Resize(opt.img_size),
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
        ]),
    ),
    batch_size=opt.batch_size,
    shuffle=True,
)

# Optimizers
# One Adam instance drives both the generator and the classifier (their
# parameter iterators are chained); the discriminator gets its own Adam
# with the same hyper-parameters.
g_parameters = itertools.chain(generator.parameters(),
                               classifier.parameters())
optimizer_G = torch.optim.Adam(g_parameters, lr=opt.lr,
                               betas=(opt.b1, opt.b2))
optimizer_D = torch.optim.Adam(discriminator.parameters(), lr=opt.lr,
                               betas=(opt.b1, opt.b2))
# Example no. 4
# 0
    dataset = torchvision.datasets.STL10(root=dset_dir,
                                         download=True,
                                         split='train')
    print(dataset.data.shape)
    print(dataset.data.mean(axis=(0, 2, 3)) / 255)
    print(dataset.data.std(axis=(0, 2, 3)) / 255)

elif dset_name == "mnist":
    # MNIST: print the training-set size and its overall mean/std,
    # scaled into [0, 1] by dividing raw pixel values by 255.
    dataset = torchvision.datasets.MNIST(root=dset_dir, train=True)
    print(list(dataset.train_data.size()))
    print(dataset.train_data.float().mean() / 255)
    print(dataset.train_data.float().std() / 255)

elif dset_name == "mnistm":
    # MNIST-M: train_data is indexed channel-last (last axis = RGB),
    # so report mean/std separately for each of the three channels.
    from mnistm import MNISTM
    dataset = MNISTM(root=dset_dir, train=True)
    print(list(dataset.train_data.size()))
    for dim in range(3):
        print(dim)
        print(dataset.train_data[:, :, :, dim].float().mean() / 255)
        print(dataset.train_data[:, :, :, dim].float().std() / 255)

elif dset_name == "svhn":
    # SVHN: data is channel-first; averaging over axes (0, 2, 3) yields
    # one mean/std per channel, again scaled by 255.
    dataset = torchvision.datasets.SVHN(root=dset_dir,
                                        download=True,
                                        split='train')
    print(dataset.data.shape)
    print(dataset.data.mean(axis=(0, 2, 3)) / 255)
    print(dataset.data.std(axis=(0, 2, 3)) / 255)

elif dset_name == "usps":