Example #1
        return (self.x[idx], self.t[idx], idx)


# ===================================================================================================== #
# Per-phase batch sizes; only the validation loader is actually built below.
batch_size = {'train': 32, 'valid': 32}

# One DataLoader per evaluation phase, keyed by phase name.
# shuffle=False keeps sample order stable for evaluation.
dataloader = {}
for phase in ['valid']:
    dataset = SrDataset(phase, dire, width, height)
    dataloader[phase] = torch.utils.data.DataLoader(
        dataset=dataset,
        batch_size=batch_size[phase],
        shuffle=False,
    )

use_gpu = torch.cuda.is_available()

# Instantiate the network and restore pretrained weights.
# map_location='cpu' keeps loading working on CPU-only machines even when
# the checkpoint was saved from a GPU run; with CUDA available the default
# device mapping is kept, and the model is moved to GPU further below.
module = Module()
state_dict = torch.load(pretrained, map_location=None if use_gpu else 'cpu')
module.load_state_dict(state_dict)

# Dump the raw weight bytes of every parameter to a flat binary file named
# 'parameters', in the model's parameter-definition order.
# NOTE(review): this must run before the model is moved to the GPU below —
# .numpy() only works on CPU tensors.
# Context manager guarantees the file is closed even if a write fails;
# 'wb' (not 'wb+') because the file is never read back here.
with open('parameters', 'wb') as fid:
    for param in module.parameters():
        # numpy arrays expose the buffer protocol, so write() emits the
        # raw element bytes directly.
        b = param.data.numpy()
        fid.write(b)

# Move the model to the GPU when CUDA is available; DataParallel then
# splits each batch across the device ids listed in `gpu`.
# (Module.cuda() returns the module itself, so the call chains.)
if use_gpu:
    module = nn.DataParallel(module.cuda(), gpu)

# Evaluation driver: a single pass (stage 0) over the validation phase.
for stage in range(1):
    for phase in ("valid",):
        print("Testing...")