Example #1
import albumentations
import torch

# GridMask is a custom augmentation class (not an albumentations built-in),
# assumed to be defined earlier in the original script.
SIZE = 128
HEIGHT = 137
WIDTH = 236
OUT_DIR = 'models'

# https://albumentations.readthedocs.io/en/latest/api/augmentations.html
data_transforms = albumentations.Compose([
    albumentations.ShiftScaleRotate(shift_limit=0.0625,
                                    scale_limit=0.1,
                                    rotate_limit=(15, 30),
                                    p=0.5),
    albumentations.CenterCrop(96, 96, p=1),
    albumentations.Cutout(p=0.3),
    albumentations.Resize(128, 128, p=1),
    albumentations.OneOf([
        GridMask(num_grid=3, rotate=(15, 30), p=0.3),
        GridMask(num_grid=4, rotate=(5, 15), p=0.3),
    ], p=0.3),
])

data_transforms_test = albumentations.Compose([
    albumentations.Flip(p=0),  # p=0: never applied (effectively a no-op)
    albumentations.CenterCrop(96, 96, p=1),
    albumentations.Resize(128, 128, p=1),
])
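
# Usage sketch (not from the original kernel): an albumentations Compose is called
# with the image as a keyword argument and returns a dict, so the augmented array is
# read back from the "image" key. The input below is a hypothetical HEIGHT x WIDTH
# grayscale grapheme image.
import numpy as np

img = np.random.randint(0, 256, (HEIGHT, WIDTH), dtype=np.uint8)
train_img = data_transforms(image=img)["image"]       # random augment, 128 x 128 output
test_img = data_transforms_test(image=img)["image"]   # center crop + resize, 128 x 128 output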


class BengaliAIDataset(torch.utils.data.Dataset):
    def __init__(self, df, y=None, transform=None):
        self.df = df.iloc[:, 1:].values
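        # (the original excerpt is truncated here)

# Hypothetical completion sketch: the original __getitem__ is not shown above, but a
# dataset of this kind typically reshapes each flattened pixel row back to
# HEIGHT x WIDTH and applies the albumentations transform. Names below are illustrative.
import numpy as np

class BengaliAIDatasetSketch(torch.utils.data.Dataset):
    def __init__(self, df, y=None, transform=None):
        self.df = df.iloc[:, 1:].values   # drop the image_id column, keep raw pixel values
        self.y = y
        self.transform = transform

    def __len__(self):
        return len(self.df)

    def __getitem__(self, idx):
        img = self.df[idx].reshape(HEIGHT, WIDTH).astype(np.uint8)
        if self.transform is not None:
            img = self.transform(image=img)["image"]
        img = torch.from_numpy(img).float().unsqueeze(0) / 255.0   # 1 x H x W tensor
        if self.y is not None:
            return img, self.y[idx]
        return img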
Example #2
# EXP_ID, SEED, setup_logger, and LOGGER are defined earlier in the original script
# (not shown in this excerpt); GridMask is again a custom augmentation class.
LOGGER_PATH = f"logs/log_{EXP_ID}.txt"
setup_logger(out_file=LOGGER_PATH)
LOGGER.info("seed={}".format(SEED))

SIZE = 128
HEIGHT = 137
WIDTH = 236
OUT_DIR = 'models'

# https://albumentations.readthedocs.io/en/latest/api/augmentations.html
data_transforms = albumentations.Compose([
    albumentations.Flip(p=0.2),
    albumentations.Rotate(limit=15, p=0.2),
    albumentations.ShiftScaleRotate(rotate_limit=15, p=0.5),
    albumentations.Cutout(p=0.2),
    GridMask(num_grid=3, rotate=15, p=0.3),
])
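
# Note: GridMask is not an albumentations built-in; it is a custom transform defined
# elsewhere in these kernels. The class below is only a simplified illustrative sketch
# of the grid-masking idea (no rotation, fixed hole ratio), not the GridMask used above.
import albumentations

class SimpleGridMask(albumentations.ImageOnlyTransform):
    """Zero out the top-left quarter of each cell in a num_grid x num_grid grid."""

    def __init__(self, num_grid=3, fill_value=0, always_apply=False, p=0.5):
        super().__init__(always_apply=always_apply, p=p)
        self.num_grid = num_grid
        self.fill_value = fill_value

    def apply(self, img, **params):
        out = img.copy()
        h, w = img.shape[:2]
        cell_h, cell_w = h // self.num_grid, w // self.num_grid
        for i in range(self.num_grid):
            for j in range(self.num_grid):
                y0, x0 = i * cell_h, j * cell_w
                out[y0:y0 + cell_h // 2, x0:x0 + cell_w // 2] = self.fill_value
        return out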
# Alternative, heavier augmentation pipeline left commented out by the original author:
'''
data_transforms = albumentations.Compose([
    albumentations.ShiftScaleRotate(p=1, border_mode=cv2.BORDER_CONSTANT, value=1),
    GridMask(num_grid=3, rotate=15, p=0.3),
    albumentations.OneOf([
        albumentations.ElasticTransform(p=0.1, alpha=1, sigma=50, alpha_affine=50,
                                        border_mode=cv2.BORDER_CONSTANT, value=1),
        albumentations.GridDistortion(distort_limit=0.05, border_mode=cv2.BORDER_CONSTANT,
                                      value=1, p=0.1),
        albumentations.OpticalDistortion(p=0.1, distort_limit=0.05, shift_limit=0.2,
                                         border_mode=cv2.BORDER_CONSTANT, value=1),
    ], p=0.3),
    albumentations.OneOf([
        albumentations.GaussNoise(var_limit=1.0),
        albumentations.Blur(),
        albumentations.GaussianBlur(blur_limit=3),
    ], p=0.4),
Example #3
# Same setup as the earlier examples: albumentations plus the custom GridMask class.
SIZE = 128
HEIGHT = 137
WIDTH = 236
OUT_DIR = 'models'

data_transforms_96 = albumentations.Compose([
    albumentations.ShiftScaleRotate(shift_limit=0.0625,
                                    scale_limit=0.1,
                                    rotate_limit=(5, 15),
                                    p=0.5),
    albumentations.CenterCrop(96, 96, p=1),
    albumentations.Resize(256, 256, p=1),
    albumentations.Cutout(num_holes=1, max_h_size=40, max_w_size=15, p=0.1),
    albumentations.OneOf([
        GridMask(num_grid=1, rotate=(5, 15), p=0.15),
        GridMask(num_grid=2, rotate=(5, 15), p=0.15),
    ], p=0.1)
])
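
# Quick shape check (hypothetical; assumes the custom GridMask class is defined):
# the crop size differs between the two pipelines in this example, but both end in
# Resize(256, 256), so downstream models always see 256 x 256 inputs.
import numpy as np

_img = np.zeros((HEIGHT, WIDTH), dtype=np.uint8)          # 137 x 236 grapheme image
print(data_transforms_96(image=_img)["image"].shape)      # (256, 256)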

data_transforms_104 = albumentations.Compose([
    albumentations.ShiftScaleRotate(shift_limit=0.0625,
                                    scale_limit=0.1,
                                    rotate_limit=(5, 15),
                                    p=0.3),
    albumentations.CenterCrop(104, 104, p=1),
    albumentations.Resize(256, 256, p=1),
    albumentations.Cutout(p=0.3),
    albumentations.OneOf([
        GridMask(num_grid=2, rotate=(5, 15), p=0.3),