Example #1
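The excerpt omits its imports and configuration; the block below is a sketch of what it needs (the model import path and the config names in the comment are assumptions inferred from the variables used further down):

import subprocess

import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
from torchvision import transforms

import data_convertors       # project-local dataset helpers used below
# from model import cleaner  # hypothetical import; some module must provide the network class

# Assumed to be defined earlier in the full script:
# data_name, tag, crop_size, bch_size, base_lr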
# dstroot for saving models.
# logroot for writing logs, if needed.
dstroot = './trainedmodels/' + data_name + '/' + tag + '/'
logroot = './logs/' + data_name + '/' + tag + '/'
subprocess.check_output(['mkdir', '-p', dstroot])
subprocess.check_output(['mkdir', '-p', logroot])
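# A portable stdlib alternative to shelling out for mkdir (requires `import os`):
#   os.makedirs(dstroot, exist_ok=True)
#   os.makedirs(logroot, exist_ok=True)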

# Transform
transform = transforms.Compose(
    [transforms.RandomCrop((crop_size, crop_size)),
     transforms.ToTensor()])
# Dataloader
convertor = data_convertors.ConvertImageSet(data_root,
                                            imlist_pth,
                                            data_name,
                                            transform=transform,
                                            is_train=True)
dataloader = DataLoader(convertor,
                        batch_size=bch_size,
                        shuffle=False,  # note: training loaders typically shuffle
                        num_workers=2)

# Make network
cleaner = cleaner().cuda()  # instantiate the imported network class (rebinds the name over the class)
cleaner.train()

# Optimizer and Loss
optimizer = optim.Adam(cleaner.parameters(), lr=base_lr)
L2_loss = nn.MSELoss()

Example #2

if data_name == 'RESIDE':
    testroot = '../data/' + data_name + '/sots_indoor_test/'
    test_list_pth = '../lists/RESIDE_indoor/sots_test_list.txt'
elif data_name == 'DCPDNData':
    testroot = '../data/' + data_name + '/TestA/'
    test_list_pth = '../lists/' + data_name + '/testA_list.txt'
else:
    raise ValueError('Unknown dataset name: ' + data_name)

Pretrained = '../trainedmodels/'+data_name+'/'+tag+'_model.pt'    
show_dst = '../cleaned_images/'+data_name+'/'+tag+'/'
#subprocess.check_output(['mkdir', '-p', show_dst])

# Set transform, convertor, and data_loader
transform = transforms.ToTensor()
convertor = data_convertors.ConvertImageSet(testroot, test_list_pth, data_name,
                                            transform=transform)
dataloader = DataLoader(convertor, batch_size=1, shuffle=False, num_workers=1)

# Make the network
cleaner = cleaner().cuda()
cleaner.load_state_dict(torch.load(Pretrained))
cleaner.eval()
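
The test example stops after the network is switched to eval mode. A minimal sketch of the inference loop this setup implies (assuming the convertor yields (hazy, target) pairs; the output naming is also an assumption):

import os
import torchvision.utils as vutils

os.makedirs(show_dst, exist_ok=True)
with torch.no_grad():
    for idx, (hazy, _) in enumerate(dataloader):
        dehazed = cleaner(hazy.cuda())
        vutils.save_image(dehazed.clamp(0, 1),
                          os.path.join(show_dst, '%d.png' % idx))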

Example #3

# Dataset directories (Windows paths; backslashes doubled so none are read as escapes)
I_HAZE    = "D:\\Image_dataset\\# I-HAZY NTIRE 2018\\hazy1\\"
I_HAZE_GT = "D:\\Image_dataset\\# I-HAZY NTIRE 2018\\GT1\\"
O_HAZE    = "D:\\Image_dataset\\# O-HAZY NTIRE 2018\\hazy1\\"
O_HAZE_GT = "D:\\Image_dataset\\# O-HAZY NTIRE 2018\\GT1\\"
SOTSI     = "C:\\Users\\FQL\\Desktop\\RESIDE-standard\\SOTS\\indoor\\hazy\\"
SOTSI_GT  = "C:\\Users\\FQL\\Desktop\\RESIDE-standard\\SOTS\\indoor\\gt1\\"
SOTSO     = "F:\\SOTS\\hazy\\"
SOTSO_GT  = "F:\\SOTS\\clear\\"
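# If these constants are ever reworked, raw strings plus pathlib would sidestep
# the escaping entirely (sketch; note a raw string cannot end in a backslash):
#   from pathlib import Path
#   I_HAZE = Path(r"D:\Image_dataset\# I-HAZY NTIRE 2018\hazy1")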
# Set paths
data_root  = '../data/' +data_name+'/train/train/'
imlist_pth = '../lists/'+data_name+'/train_list.txt'

# dstroot for saving models.
# logroot for writing logs, if needed.
dstroot = './trainedmodels/'+data_name+'/'+tag+'/'
logroot = './logs/'+data_name+'/'+tag+'/'
subprocess.check_output(['mkdir', '-p', dstroot])
subprocess.check_output(['mkdir', '-p', logroot])

# Transform
transform = transforms.ToTensor()
# Dataloader
convertor  = data_convertors.ConvertImageSet(data_root, imlist_pth, data_name,
                                             transform=transform, is_train=True,
                                             with_aug=with_data_aug, crop_size=crop_size)
dataloader = DataLoader(convertor, batch_size=bch_size, shuffle=False, num_workers=4)

# Make network
cleaner = cleaner().cuda()
cleaner.train()

# Optimizer and Loss
optimizer = optim.Adam(cleaner.parameters(), lr=base_lr)
L1_loss = nn.L1Loss()

# Start training
print('Start training...')
for epoch in range(epoch_size):        
    for iteration, data in enumerate(dataloader):
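        # The original example is truncated here; the loop body below is a
        # hedged sketch of the standard step implied by the setup above
        # (the unpacking order of `data` is an assumption about data_convertors):
        hazy, clean = data
        hazy, clean = hazy.cuda(), clean.cuda()

        dehazed = cleaner(hazy)
        loss = L1_loss(dehazed, clean)

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        if iteration % 100 == 0:
            print('epoch %d, iter %d, L1 loss %.4f' % (epoch, iteration, loss.item()))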