# NOTE(review): this chunk begins mid-expression — the `{ phase: DataLoader(...,`
# head of this dict comprehension lies before the visible region. Only the
# 'valid' split is built, so everything below is evaluation-only.
shuffle=False) for phase in ['valid'] }

use_gpu = torch.cuda.is_available()

# Restore pretrained weights into a fresh model instance.
# NOTE(review): torch.load() is called without map_location — this will fail on
# a CPU-only host if the checkpoint was saved from a GPU; confirm deployment.
module = Module()
module.load_state_dict(torch.load(pretrained))

# Dump every parameter tensor's raw bytes into one flat file named 'parameters'.
# NOTE(review): no `with` block (handle leaks if a write raises) and no
# shape/dtype metadata is written — only code that already knows the parameter
# layout can read this file back.
fid = open('parameters', 'wb+')
for param in module.parameters():
    b = param.data.numpy()  # CPU numpy view of the weight tensor
    fid.write(b)            # writes the ndarray's raw buffer
fid.close()

if use_gpu:
    module.cuda()
    # `gpu` is defined outside this chunk — presumably a list of device ids
    # for data parallelism; verify against the surrounding file.
    module = nn.DataParallel(module, gpu)

# Single-pass evaluation; the `[0] * 1` keeps the (disabled) multi-epoch shape.
for stage in ([0] * 1):  # for epoch in range(1):
    for phase in ["valid"]:
        print("Testing...")
        # Inference mode: train(False) switches dropout/batch-norm to eval
        # behavior; freezing requires_grad disables gradient tracking.
        module.train(False)
        for param in module.parameters():
            param.requires_grad_(False)

        running_dist = 0.  # accumulator; consumed past the visible region
        for batch, data in enumerate(dataloader[phase], 1):
            # assumes each batch is an (input, target, index) triple — TODO confirm
            x, t, idx = data
            if use_gpu:
                x = x.cuda()
            # ... loop body continues beyond this chunk