import torchsample.transforms as ts


def ultrasound_transform(self):
    """Return the train/valid torchsample pipelines for the ultrasound data.

    Training adds random flips and affine jitter on top of the shared
    cast/crop/normalise steps; validation only casts, crops and normalises.
    """
    train_transform = ts.Compose([ts.ToTensor(),
                                  ts.TypeCast(['float']),
                                  ts.AddChannel(axis=0),
                                  ts.SpecialCrop(self.patch_size, 0),
                                  ts.RandomFlip(h=True, v=False, p=self.random_flip_prob),
                                  ts.RandomAffine(rotation_range=self.rotate_val,
                                                  translation_range=self.shift_val,
                                                  zoom_range=self.scale_val,
                                                  interp='bilinear'),
                                  ts.StdNormalize(),
                                  ])

    valid_transform = ts.Compose([ts.ToTensor(),
                                  ts.TypeCast(['float']),
                                  ts.AddChannel(axis=0),
                                  ts.SpecialCrop(self.patch_size, 0),
                                  ts.StdNormalize(),
                                  ])

    return {'train': train_transform, 'valid': valid_transform}
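
# A minimal, hypothetical usage sketch. In the original repository this is
# presumably a method on a configuration/dataset class that provides
# patch_size, random_flip_prob, rotate_val, shift_val and scale_val; as
# excerpted here it can be called as a plain function. The attribute values
# and image size below are illustrative, not taken from the original code.
#
#     import numpy as np
#     from types import SimpleNamespace
#
#     opts = SimpleNamespace(patch_size=(208, 288),    # output size for ts.SpecialCrop
#                            random_flip_prob=0.5,     # chance of a horizontal flip
#                            rotate_val=15.0,          # max rotation in degrees
#                            shift_val=(0.1, 0.1),     # translation, fraction of image size
#                            scale_val=(0.9, 1.1))     # zoom range
#
#     pipelines = ultrasound_transform(opts)
#
#     image = np.random.rand(256, 320).astype('float32')   # stand-in 2-D ultrasound frame
#     augmented = pipelines['train'](image)                 # FloatTensor of shape (1, 208, 288)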
import numpy as np
import pandas as pd
from torch.optim import Adam
import torchsample.transforms as transforms

from models import Net
from utils import ScalarEncoder, accuracy, AverageMeter, make_dataset, save_model, print_metrics
from logger import Logger
from sklearn.model_selection import KFold


data = pd.read_json("data/train.json")
# Each band arrives as a flat list of 5625 values; reshape into 75x75 images.
data["band_1"] = data["band_1"].apply(lambda x: np.array(x).reshape(75, 75))
data["band_2"] = data["band_2"].apply(lambda x: np.array(x).reshape(75, 75))
# Non-numeric incidence angle entries become NaN.
data["inc_angle"] = pd.to_numeric(data["inc_angle"], errors="coerce")


# Augmentation: random horizontal flips plus per-image standardisation.
# Note that affine_transforms is constructed here but not included in the
# composed pipeline below; only the flip and normalisation are applied.
affine_transforms = transforms.RandomAffine(rotation_range=None, translation_range=0.1, zoom_range=(0.95, 1.05))
rand_flip = transforms.RandomFlip(h=True, v=False)
std_normalize = transforms.StdNormalize()
my_transforms = transforms.Compose([rand_flip, std_normalize])
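# For illustration, the composed pipeline could be applied to a single reshaped
# band as sketched below; in the actual script this presumably happens inside
# make_dataset. The unsqueeze(0) adds the leading channel dimension these
# torchsample transforms operate on, and the tensor conversion is needed
# because RandomFlip and StdNormalize expect torch tensors.
#
#     import torch
#
#     band = torch.from_numpy(data["band_1"].iloc[0]).float().unsqueeze(0)  # shape (1, 75, 75)
#     augmented = my_transforms(band)   # randomly h-flipped, then standardised per-image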
# scalar encoder for the incidence angle feature
encoder = ScalarEncoder(100, 30, 45)
# 5-fold cross-validation: five train/validation splits, one network trained per fold
kf = KFold(n_splits=5, shuffle=True, random_state=100)
kfold_datasets = []
networks = []
optimizers = []
for train_index, val_index in kf.split(data):
    # Note: the same augmenting pipeline (my_transforms) is applied to both splits.
    train_dataset = make_dataset(data.iloc[train_index], encoder, my_transforms)
    val_dataset = make_dataset(data.iloc[val_index], encoder, my_transforms)
    kfold_datasets.append({"train": train_dataset, "val": val_dataset})
    # A new network and optimizer for each train-validation split
    networks.append(Net().cuda())
    optimizers.append(Adam(networks[-1].parameters(), lr=0.0005, weight_decay=0.0002))
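

# The excerpt stops after setting up the folds. Below is a minimal, hypothetical
# sketch of how each fold could then be trained with plain PyTorch. It assumes
# the datasets returned by make_dataset behave like standard torch Datasets
# yielding (image, label) pairs with integer class labels, and that Net returns
# two-class logits; the real script presumably uses its Logger, accuracy,
# AverageMeter, save_model and print_metrics helpers instead, and may bundle
# the encoded incidence angle as an extra input. Batch size, epoch count and
# the loss are illustrative, not taken from the original code.
#
#     import torch
#     import torch.nn.functional as F
#     from torch.utils.data import DataLoader
#
#     n_epochs = 30        # illustrative
#     batch_size = 32
#
#     for fold, (datasets, net, optimizer) in enumerate(zip(kfold_datasets, networks, optimizers)):
#         train_loader = DataLoader(datasets["train"], batch_size=batch_size, shuffle=True)
#         val_loader = DataLoader(datasets["val"], batch_size=batch_size, shuffle=False)
#
#         for epoch in range(n_epochs):
#             net.train()
#             for images, labels in train_loader:      # unpacking depends on make_dataset
#                 images, labels = images.cuda(), labels.cuda()
#                 optimizer.zero_grad()
#                 loss = F.cross_entropy(net(images), labels)
#                 loss.backward()
#                 optimizer.step()
#
#             net.eval()
#             correct, total = 0, 0
#             with torch.no_grad():
#                 for images, labels in val_loader:
#                     images, labels = images.cuda(), labels.cuda()
#                     preds = net(images).argmax(dim=1)
#                     correct += (preds == labels).sum().item()
#                     total += labels.numel()
#             print(f"fold {fold} epoch {epoch}: val accuracy {correct / total:.3f}")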