'drn':
        transforms.Compose([
            my_transform.Clahe(clipLimit=4, tileGridSize=(8, 8)),
            my_transform.Resize(size=(512, 1024)),
        ]),
        'seresnext':
        transforms.Compose([
            my_transform.Clahe(clipLimit=4, tileGridSize=(8, 8)),
        ])
    }
    # Prediction-time datasets, one per model key ('drn' / 'seresnext').
    # Both read the same input_dir and differ only in the input transform
    # composed above (the 'drn' pipeline additionally resizes to 512x1024;
    # 'seresnext' applies CLAHE only).
    image_datasets = {
        'drn':
        imagefolder.EdgeDataset(
            input_dir=input_dir,
            #label_dir=label_dir,
            input_transform=data_transforms['drn'],
            #label_transform=data_transforms[x],
            mode='predict',  # inference mode: no labels, no random crop — presumably; verify in EdgeDataset
            #randcrop=(1024,1024),
        ),
        'seresnext':
        imagefolder.EdgeDataset(
            input_dir=input_dir,
            #label_dir=label_dir,
            input_transform=data_transforms['seresnext'],
            #label_transform=data_transforms[x],
            mode='predict',
            #randcrop=(1024,1024),
        )
    }

    # Both datasets wrap the same input directory, so either key's length
    # serves as the dataset size — TODO confirm EdgeDataset enumerates
    # input_dir identically for both.
    dataset_size = len(image_datasets['drn'])
Example #2
0
            my_transform.Clahe(clipLimit=4, tileGridSize=(8, 8))
            #transforms.Resize((32, 32),interpolation=Image.BILINEAR),
            #transforms.CenterCrop(224),
            #transforms.ToTensor()
            #transforms.Normalize([0.5, 0.5], [0.5, 0.5])
        ]),
    }
    #data_sampler={x:torch.utils.data.sampler.WeightedRandomSampler(sample_weight[x], num_samples=num_samples[x], replacement=True) for x in["train","validation"]}

    # Train/validation datasets over the same directories. Only the training
    # split gets stochastic augmentation: a 1024x1024 random crop and a
    # horizontal flip with probability 0.5; validation passes None for both.
    image_datasets = {
        x: imagefolder.EdgeDataset(
            input_dir=input_dir,
            label_dir=label_dir,
            input_transform=data_transforms[x],
            #label_transform=data_transforms[x],
            mode=x,
            randcrop=(1024, 1024) if x == 'train' else None,
            n_class=4,  # number of label classes — TODO confirm against the model head
            seed=seed,  # presumably fixes the split/augmentation RNG — verify in EdgeDataset
            randhflip=0.5 if x == 'train' else None,
            per_train=per_train)  # assumed: fraction of data assigned to 'train' — confirm
        for x in ["train", "validation"]
    }
    # Report split sizes immediately so a bad per_train value or directory
    # layout is visible before training starts.
    dataset_sizes = {
        x: len(image_datasets[x])
        for x in ['train', 'validation']
    }
    print(dataset_sizes)

    num_samples = {
        'train':
Example #3
0
    #num_sample=2242
    # Inference-only preprocessing: CLAHE contrast equalization on the input
    # images. Every other step (resize, flips, tensor conversion,
    # normalization) is deliberately left disabled below.
    data_transform = transforms.Compose([
        my_transform.Clahe(clipLimit=4, tileGridSize=(8, 8))
        #transforms.Resize((32,32),interpolation=Image.BILINEAR),
        #transforms.RandomVerticalFlip(),
        #transforms.RandomHorizontalFlip(),
        #transforms.ToTensor()
        #transforms.Normalize([0.5, 0.5], [0.5, 0.5])
        #transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ])
    #data_sampler={x:torch.utils.data.sampler.WeightedRandomSampler(sample_weight[x], num_samples=num_samples[x], replacement=True) for x in["train","validation"]}

    # Single prediction dataset: no labels and no random cropping — the
    # commented parameters show what the training configuration enables.
    image_dataset = imagefolder.EdgeDataset(
        input_dir=input_dir,
        #label_dir=label_dir,
        input_transform=data_transform,
        #label_transform=data_transforms[x],
        mode='predict',
        #randcrop=(1024,1024),
    )

    dataset_size = len(image_dataset)
    print(dataset_size)

    #num_samples={x:dataset_sizes[x]//20+batch_size-(dataset_sizes[x]//20)%batch_size for x in ['train', 'validation']}
    #sample_weight={x:[1]*dataset_sizes[x] for x in ['train', 'validation']}
    #data_sampler={x:torch.utils.data.sampler.WeightedRandomSampler(sample_weight[x], num_samples=num_samples[x], replacement=True) for x in ["train","validation"]}

    # Deterministic iteration order (shuffle=False) so predictions can be
    # matched back to the dataset's file listing; 2 worker processes load
    # batches in the background.
    dataloader = torch.utils.data.DataLoader(image_dataset,
                                             batch_size=batch_size,
                                             shuffle=False,
                                             num_workers=2)
Example #4
0
        "validation":
        transforms.Compose([
            my_transform.Clahe(clipLimit=4, tileGridSize=(8, 8))
            #transforms.Resize((32, 32),interpolation=Image.BILINEAR),
            #transforms.CenterCrop(224),
            #transforms.ToTensor()
            #transforms.Normalize([0.5, 0.5], [0.5, 0.5])
        ]),
    }
    #data_sampler={x:torch.utils.data.sampler.WeightedRandomSampler(sample_weight[x], num_samples=num_samples[x], replacement=True) for x in["train","validation"]}

    # Train/validation datasets; unlike the other training variants in this
    # file, this one passes no random crop / flip / n_class / seed arguments,
    # relying entirely on EdgeDataset defaults.
    image_datasets = {
        x: imagefolder.EdgeDataset(
            input_dir=input_dir,
            label_dir=label_dir,
            input_transform=data_transforms[x],
            #label_transform=data_transforms[x],
            mode=x,
            per_train=per_train)  # assumed: fraction of data assigned to 'train' — confirm
        for x in ["train", "validation"]
    }
    # Report split sizes immediately so a bad per_train value or directory
    # layout is visible before training starts.
    dataset_sizes = {
        x: len(image_datasets[x])
        for x in ['train', 'validation']
    }
    print(dataset_sizes)

    # Draw 1/20th of each split per pass — presumably consumed by a
    # WeightedRandomSampler like the commented-out one above; confirm in the
    # continuation of this function.
    num_samples = {x: dataset_sizes[x] // 20 for x in ['train', 'validation']}
    sample_weight = {
        x: [1] * dataset_sizes[x]
        for x in ['train', 'validation']
Example #5
0
    # Label-space resizing to 512x1024 for both splits; only applied when
    # --use_resize is set (see label_transform below), in which case the
    # random crop is disabled — presumably so crop and resize are mutually
    # exclusive augmentation paths; confirm against the input transforms.
    target_transforms={
        'train': transforms.Compose([
            my_transform.Resize(size=(512,1024)),
        ]),
        "validation": transforms.Compose([
            my_transform.Resize(size=(512,1024)),
        ]),
    }

    #data_sampler={x:torch.utils.data.sampler.WeightedRandomSampler(sample_weight[x], num_samples=num_samples[x], replacement=True) for x in["train","validation"]}

    # Train/validation datasets. Training gets a 1024x1024 random crop (only
    # when not resizing) and horizontal flips with probability 0.5;
    # validation gets neither.
    image_datasets={x:imagefolder.EdgeDataset(input_dir=input_dir,
                                            label_dir=label_dir,
                                            input_transform=data_transforms[x],
                                            label_transform=target_transforms[x] if parsed.use_resize else None,
                                            mode=x,
                                            randcrop=(1024,1024) if x=='train' and not parsed.use_resize else None,
                                            n_class=4,
                                            seed=seed,
                                            randhflip=0.5 if x=='train' else None,
                                            per_train=per_train)
                    for x in ["train","validation"]}
    dataset_sizes = {x: len(image_datasets[x]) for x in ['train', 'validation']}
    print(dataset_sizes)

    # Half of each split, rounded up to a multiple of batch_size so the
    # sampler yields only complete batches.
    # NOTE(review): when dataset_sizes[x]//2 is already a multiple of
    # batch_size, this formula still adds a full extra batch_size
    # (the remainder term is 0) — confirm that is intended and not an
    # off-by-one in the round-up.
    num_samples={'train':dataset_sizes['train']//2+batch_size-(dataset_sizes['train']//2)%batch_size,
                    'validation':dataset_sizes['validation']//2+batch_size-(dataset_sizes['validation']//2)%batch_size}
    # num_samples={'train':dataset_sizes['train'],
    #                 'validation':dataset_sizes['validation']}

    # All weights are 1, so the WeightedRandomSampler here degenerates to
    # uniform sampling with replacement; the explicit weight vector
    # presumably exists so per-sample weights can be plugged in later.
    sample_weight={x:[1]*dataset_sizes[x] for x in ['train', 'validation']}
    data_sampler={x:torch.utils.data.sampler.WeightedRandomSampler(sample_weight[x], num_samples=num_samples[x], replacement=True) for x in ["train","validation"]}