Example #1
import time
from collections import defaultdict

import numpy as np
import pandas as pd
import torch
from tqdm import tqdm

import dataloader  # project-local module
import metrics     # project-local module


def evaluate(model_name, dataset, quadratic=False):
    
    print(f'[{time.ctime()}] Start evaluating {model_name} on {dataset}')
    
    # quadratic = 'quad' in model_name
    
    if model_name == 'sepconv':
        import utilities
        model = utilities.get_sepconv(weights='l1').cuda()
    elif model_name in ('qvi-lin', 'qvi-quad'):
        from code.quadratic.interpolate import interpolate as model
    elif model_name == 'dain':
        from code.DAIN.interpolate import interpolate_efficient as model
    elif model_name == 'sepconv2':
        checkpoint = torch.load(
            'models/checkpoint_1593886534_seed_0_optimizer=adamax_input_size=4_lr=0.001_lr2=0.0001_weights=None_kernel_size=45_loss=l1_pretrain=1_kernel_size_d=31_kernel_size_scale=4_kernel_size_qd=25_kernel_size_qd_scale=4'
        )
        model = checkpoint['last_model'].cuda().eval()
    else:
        raise NotImplementedError()
        
    torch.manual_seed(42)
    np.random.seed(42)
    
    results = defaultdict(list)
    if dataset == 'lmd':
        ds = dataloader.large_motion_dataset2(quadratic=quadratic, fold='test', cropped=False)
    elif dataset == 'adobe240':
        ds = dataloader.adobe240_dataset(quadratic=quadratic, fold='test')
    elif dataset == 'gopro':
        ds = dataloader.gopro_dataset(quadratic=quadratic, fold='test')
    elif dataset == 'vimeo90k':
        ds = dataloader.vimeo90k_dataset(quadratic=quadratic, fold='test')
    else:
        raise NotImplementedError()

    ds = dataloader.TransformedDataset(ds, normalize=True, random_crop=False, flip_probs=0)    
    
    _, _, test = dataloader.split_data(ds, [0, 0, 1])
    
    data_loader = torch.utils.data.DataLoader(test, batch_size=1)
    with torch.no_grad():
        for X, y in tqdm(data_loader, total=len(data_loader)):
            X = X.cuda()
            y = y.cuda()
            
            y_hat = model(X).clamp(0, 1)
            
            # scale to [0, 255] before computing the image metrics
            y.mul_(255)
            y_hat.mul_(255)
            
            results['psnr'].extend(metrics.psnr(y_hat, y))
            results['ie'].extend(metrics.interpolation_error(y_hat, y))
            results['ssim'].extend(metrics.ssim(y_hat, y))
    
    # store in dataframe
    results = pd.DataFrame(results)
    results['model'] = model_name
    results['dataset'] = dataset
    
    return results
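
A possible driver for this function (a sketch only, not part of the original example; the model and dataset names are those handled above, and pd.concat works because each call returns rows tagged with 'model' and 'dataset'):

# Hypothetical usage sketch.
all_results = []
for model_name in ['sepconv', 'qvi-lin', 'dain']:
    for dataset in ['vimeo90k', 'adobe240']:
        all_results.append(evaluate(model_name, dataset))

summary = pd.concat(all_results).groupby(['model', 'dataset']).mean()
print(summary[['psnr', 'ssim', 'ie']])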
Example #2
    def do_epoch(dataloader, fold, epoch, train=False):
        assert fold in FOLDS

        if verbose:
            pb = tqdm(desc=f'{fold} {epoch+1}/{n_epochs}',
                      total=len(dataloader),
                      leave=True,
                      position=0)

        for i, (X, y) in enumerate(dataloader):
            X = X.cuda()
            y = y.cuda()

            y_hat = G(X)

            l1_loss = L1_loss(y_hat, y)
            feature_loss = Perc_loss(y_hat, y)

            lf_loss = l1_loss + feature_loss

            if train:
                optimizer.zero_grad()
                lf_loss.backward()
                optimizer.step()

            # compute metrics
            y_hat = (y_hat * 255).clamp(0, 255)
            y = (y * 255).clamp(0, 255)

            psnr = metrics.psnr(y_hat, y)
            ssim = metrics.ssim(y_hat, y)
            ie = metrics.interpolation_error(y_hat, y)

            results.store(
                fold, epoch, {
                    'L1_loss': l1_loss.item(),
                    'psnr': psnr,
                    'ssim': ssim,
                    'ie': ie,
                    'lf': lf_loss.item()
                })

            if verbose: pb.update()

        if verbose:
            pb.close()

        # push this epoch's aggregated metrics to tensorboard
        results.write_tensorboard(fold, epoch)
        sys.stdout.flush()
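
One way the enclosing training loop might call do_epoch (a sketch under assumptions: the surrounding scope would also define train_loader, valid_loader and the fold names in FOLDS, none of which are shown here):

    # Hypothetical outer loop; the names below are assumptions, not from the source.
    for epoch in range(n_epochs):
        G.train()
        do_epoch(train_loader, 'train', epoch, train=True)

        G.eval()
        with torch.no_grad():  # skip gradient tracking during validation
            do_epoch(valid_loader, 'valid', epoch)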
Example #3
    gen = dataloader.get_datagenerator(dataset, quadratic=is_quadratic)

    for inputs, ii in tqdm(gen, total=N_TEST):
        
        # split each input frame into corner crops
        cornercrops1 = cropper.crop(inputs[0].numpy())
        cornercrops2 = cropper.crop(inputs[1].numpy())

        # interpolate each pair of corresponding crops separately
        cropped_results = []
        for corner1, corner2 in zip(cornercrops1, cornercrops2):
            corner1 = torch.Tensor(np.array(corner1))
            corner2 = torch.Tensor(np.array(corner2))
            result = interpolate([corner1, corner2])
            cropped_results.append(result)

        # stitch the interpolated crops back into a full frame
        result = torch.Tensor(cropper.decrop(*cropped_results)).int()

        result = result.unsqueeze(0)
        ii = ii.unsqueeze(0)

        # compute metrics
        ssim = metrics.ssim(result, ii).item()
        psnr = metrics.psnr(result, ii).item()
        ie = metrics.interpolation_error(result, ii).item()

        results.store(method=method, dataset=dataset, values=[ssim, psnr, ie])
        k += 1


results.save()
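
The cropper object used above is project-specific; purely as an illustration of the crop/decrop pattern, a toy four-quadrant version (the real implementation may use overlapping crops and blending) could look like this:

# Illustrative stand-in only; assumes HxWxC numpy frames.
import numpy as np

class QuadrantCropper:
    def crop(self, frame):
        # split the frame into its four quadrants
        h, w = frame.shape[0] // 2, frame.shape[1] // 2
        return [frame[:h, :w], frame[:h, w:], frame[h:, :w], frame[h:, w:]]

    def decrop(self, tl, tr, bl, br):
        # reassemble the four quadrants into one frame
        top = np.concatenate([tl, tr], axis=1)
        bottom = np.concatenate([bl, br], axis=1)
        return np.concatenate([top, bottom], axis=0)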