# Train
        # --- One training epoch (fragment: the enclosing epoch loop and the
        # definitions of net/optimizer_ft/n_epoch/epoch start above this view) ---
        net.train()  # enable training-mode behavior (dropout, batch-norm updates)
        cont = 0           # batch counter (only incremented in commented-out code below)
        loss_train = 0.0   # running training-loss accumulator for this epoch
        grads_loss = 0.0   # running gradient-loss accumulator for this epoch
        # Per-epoch probability of dropping RGB channels during augmentation.
        # Currently fixed at 1 for every epoch; the commented lines below were
        # alternative ramp-up schedules.
        RGB_drops = np.array([1] *
                             n_epoch)  # + list(range(5)) + [5]*(n_epoch-10))/5
        #RGB_drops = np.array([0]*n_epoch + list(range(5)) + [5]*(n_epoch-10))/5
        # flip
        #RGB_drops = RGB_drops[::-1]
        # Transforms train
        # Transforms are rebuilt each epoch so drop_p can follow the schedule above.
        train_trans = make_train_transforms(drop_p=RGB_drops[epoch])
        test_trans = make_test_transforms(drop_p=RGB_drops[epoch])

        # Create datasets
        # NOTE(review): datasets and loaders are re-created every epoch —
        # presumably so the epoch-dependent drop_p takes effect; confirm intended.
        dataset = NYUDataset(train_depths, transforms=train_trans)
        dataset_val = NYUDataset(depths_list['val'], transforms=test_trans)
        training_generator = data.DataLoader(dataset, **params)
        val_generator = data.DataLoader(dataset_val, **params_test)

        # Mini-batch loop; each item is (depth target, RGB input, filename).
        # (Loop body continues past this excerpt.)
        for _i, (depths, rgbs, filename) in enumerate(training_generator):
            #cont+=1
            iter_train += 1  # global training-iteration counter
            # Get items from generator
            # assumes a CUDA device is available — TODO confirm
            inputs, outputs = rgbs.cuda(), depths.cuda()

            #print(torch.max(outputs.view(input.size(0), -1)))

            # Clean grads
            optimizer_ft.zero_grad()
            # NOTE(review): tail of an augmentation pipeline (presumably
            # albumentations Compose/OneOf) whose opening lines are above this view.
            CLAHE(clip_limit=2),         # contrast-limited adaptive histogram equalization
            IAASharpen(),                # sharpening filter
            IAAEmboss(),                 # emboss filter
            RandomBrightnessContrast(),  # random brightness/contrast jitter
        ], p=0.3),                       # this group fires with probability 0.3
        HueSaturationValue(p=0.3),
    ], p=p)                             # overall pipeline probability `p` (function argument)


# Augmentation pipeline, applied with overall probability 0.9.
augm = strong_aug(0.9)

# Sample depth images to run through the dataset.
depths = [
    '../sample_images/classroom_000310depth.png',
    '../sample_images/classroom_000350depth.png',
    '../sample_images/classroom_000329depth.png',
]

# Build the evaluation-mode dataset over the sample images.
dataset = NYUDataset(depths, is_train=False, transforms=augm)
# Parameters
# DataLoader settings — identical for train and test except that only the
# training loader shuffles.
params = dict(batch_size=16, shuffle=True, num_workers=12, pin_memory=True)
params_test = dict(batch_size=16, shuffle=False, num_workers=12, pin_memory=True)

# Wrap the datasets in DataLoaders.
training_generator = data.DataLoader(dataset, **params)
# NOTE(review): `dataset_val` is not defined anywhere in this script section —
# confirm it is created elsewhere, otherwise this raises NameError at runtime.
val_generator = data.DataLoader(dataset_val, **params_test)

# Instantiate the RGB+depth model.
model = RGBDepth_Depth()