Example #1
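Note: all of the examples below assume a set of module-level imports from the surrounding project. A minimal sketch of what they would look like; the exact names and import paths of the project-local modules (ld, pilTransforms, cvTransforms, myDataLoader) are assumptions inferred from usage:

import os
import pickle

import torch
import torch.utils.data

# Project-local modules (assumed import paths):
import loadData as ld                   # builds/caches the dataset index and statistics
import PILTransforms as pilTransforms   # PIL-based transform pipeline
import cvTransforms                     # OpenCV-based transform pipeline
import DataSet as myDataLoader          # Dataset wrappers (PILDataset, MyDataset, ...)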
def cityPIL_randscalecrop(cached_data_file,
                          data_dir,
                          classes,
                          batch_size,
                          num_work=6,
                          scale=(0.5, 2.0),
                          size=(1024, 512),
                          scale1=1,
                          ignore_idx=255):
    print("This input size is  " + str(size))

    if not os.path.isfile(cached_data_file):
        dataLoad = ld.LoadData(data_dir, classes, cached_data_file)
        data = dataLoad.processData()
        if data is None:
            print('Error while pickling data. Please check.')
            exit(-1)
    else:
        with open(cached_data_file, "rb") as f:
            data = pickle.load(f)

    if not isinstance(size, tuple):
        size = (size, size)

    if not isinstance(scale, tuple):
        scale = (scale, scale)

    train_transforms = pilTransforms.Compose([
        pilTransforms.RandomScale(scale=scale),
        pilTransforms.RandomCrop(crop_size=size, ignore_idx=ignore_idx),
        pilTransforms.RandomFlip(),
        pilTransforms.Normalize(scaleIn=scale1)
    ])
    val_transforms = pilTransforms.Compose(
        [pilTransforms.Resize(size=size),
         pilTransforms.Normalize(scaleIn=1)])
    trainLoader = torch.utils.data.DataLoader(myDataLoader.PILDataset(
        data['trainIm'],
        data['trainAnnot'],
        Double=False,
        transform=train_transforms),
                                              batch_size=batch_size,
                                              shuffle=True,
                                              num_workers=num_work,
                                              pin_memory=True)

    valLoader = torch.utils.data.DataLoader(myDataLoader.PILDataset(
        data['valIm'],
        data['valAnnot'],
        Double=False,
        transform=val_transforms),
                                            batch_size=batch_size,
                                            shuffle=False,
                                            num_workers=num_work,
                                            pin_memory=True)

    return trainLoader, valLoader, data
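A hedged usage sketch for this loader factory: the cache path, dataset root, and class count are placeholders, and the loop assumes PILDataset yields (image, label) batches:

trainLoader, valLoader, data = cityPIL_randscalecrop(
    cached_data_file='city.p',    # placeholder cache path
    data_dir='./cityscapes',      # placeholder dataset root
    classes=19,                   # e.g. the 19 Cityscapes train classes
    batch_size=8)

for images, labels in trainLoader:    # assumes (image, label) batches
    pass  # forward/backward pass goes here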
Example #2
def cityCV_randscalecrop(cached_data_file,
                         data_dir,
                         classes,
                         batch_size,
                         size_h,
                         size_w,
                         scale,
                         num_work=6):
    print("This input size is  %d , %d" % (size_h, size_w))
    if not os.path.isfile(cached_data_file):
        dataLoad = ld.LoadData(data_dir, classes, cached_data_file)
        data = dataLoad.processData()
        if data is None:
            print('Error while pickling data. Please check.')
            exit(-1)
    else:
        with open(cached_data_file, "rb") as f:
            data = pickle.load(f)

    trainDataset_main = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(size_w, size_h),
        cvTransforms.RandomCropResize(32),
        cvTransforms.RandomFlip(),
        cvTransforms.ToTensor(scale),
    ])

    valDataset = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(size_w, size_h),
        cvTransforms.ToTensor(1),
    ])

    trainLoader = torch.utils.data.DataLoader(myDataLoader.MyDataset(
        data['trainIm'], data['trainAnnot'], transform=trainDataset_main),
                                              batch_size=batch_size,
                                              shuffle=True,
                                              num_workers=num_work,
                                              pin_memory=True)

    valLoader = torch.utils.data.DataLoader(myDataLoader.MyDataset(
        data['valIm'], data['valAnnot'], transform=valDataset),
                                            batch_size=batch_size,
                                            shuffle=False,
                                            num_workers=num_work,
                                            pin_memory=True)

    return trainLoader, valLoader, data
Example #3
def cityCVaux_dataloader(cached_data_file,
                         data_dir,
                         classes,
                         batch_size,
                         scaleIn,
                         size=1024,
                         num_work=6):
    if size == 1024:
        scale = [1024, 1536, 1280, 768, 512]
        crop = [32, 96, 96, 32, 12]
    elif size == 2048:
        scale = [2048, 1536, 1280, 1024, 768]
        crop = [96, 96, 64, 32, 32]
    else:
        scale = [1024, 1536, 1280, 768, 512]
        crop = [32, 100, 100, 32, 0]

    if not os.path.isfile(cached_data_file):
        dataLoad = ld.LoadData(data_dir, classes, cached_data_file)
        data = dataLoad.processData()
        if data is None:
            print('Error while pickling data. Please check.')
            exit(-1)
    else:
        with open(cached_data_file, "rb") as f:
            data = pickle.load(f)

    trainDataset_main = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(scale[0], scale[0] // 2),  # e.g. (1024, 512) when size == 1024
        cvTransforms.RandomCropResize(crop[0]),       # e.g. 32
        cvTransforms.RandomFlip(),
        cvTransforms.ToMultiTensor(scaleIn),
    ])
    print("Training at %d x %d with crop %d" %
          (scale[0], scale[0] // 2, crop[0]))

    trainDataset_scale1 = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(scale[1], scale[1] // 2),  # e.g. (1536, 768)
        cvTransforms.RandomCropResize(crop[1]),
        cvTransforms.RandomFlip(),
        cvTransforms.ToMultiTensor(scaleIn),
    ])
    print("Training at %d x %d with crop %d" %
          (scale[1], scale[1] // 2, crop[1]))

    trainDataset_scale2 = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(scale[2], scale[2] // 2),  # e.g. (1280, 640)
        cvTransforms.RandomCropResize(crop[2]),
        cvTransforms.RandomFlip(),
        cvTransforms.ToMultiTensor(scaleIn),
    ])
    print("Training at %d x %d with crop %d" %
          (scale[2], scale[2] // 2, crop[2]))

    trainDataset_scale3 = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(scale[3], scale[3] // 2),  # e.g. (768, 384)
        cvTransforms.RandomCropResize(crop[3]),
        cvTransforms.RandomFlip(),
        cvTransforms.ToMultiTensor(scaleIn),
    ])
    print("Training at %d x %d with crop %d" %
          (scale[3], scale[3] // 2, crop[3]))

    trainDataset_scale4 = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(scale[4], scale[4] // 2),  # e.g. (512, 256)
        cvTransforms.RandomCropResize(crop[4]),
        cvTransforms.RandomFlip(),
        cvTransforms.ToMultiTensor(scaleIn),
    ])
    print("Training at %d x %d with crop %d" %
          (scale[4], scale[4] // 2, crop[4]))

    valDataset = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(scale[0], scale[0] // 2),  # e.g. (1024, 512)
        cvTransforms.ToMultiTensor(1),
    ])
    print("Validating at %d x %d" % (scale[0], scale[0] // 2))

    trainLoader = torch.utils.data.DataLoader(myDataLoader.MyAuxDataset(
        data['trainIm'], data['trainAnnot'], transform=trainDataset_main),
                                              batch_size=batch_size,
                                              shuffle=True,
                                              num_workers=num_work,
                                              pin_memory=True)

    trainLoader_scale1 = torch.utils.data.DataLoader(myDataLoader.MyAuxDataset(
        data['trainIm'], data['trainAnnot'], transform=trainDataset_scale1),
                                                     batch_size=batch_size,
                                                     shuffle=True,
                                                     num_workers=num_work,
                                                     pin_memory=True)

    trainLoader_scale2 = torch.utils.data.DataLoader(myDataLoader.MyAuxDataset(
        data['trainIm'], data['trainAnnot'], transform=trainDataset_scale2),
                                                     batch_size=batch_size,
                                                     shuffle=True,
                                                     num_workers=num_work,
                                                     pin_memory=True)

    trainLoader_scale3 = torch.utils.data.DataLoader(myDataLoader.MyAuxDataset(
        data['trainIm'], data['trainAnnot'], transform=trainDataset_scale3),
                                                     batch_size=batch_size + 4,
                                                     shuffle=True,
                                                     num_workers=num_work,
                                                     pin_memory=True)

    trainLoader_scale4 = torch.utils.data.DataLoader(myDataLoader.MyAuxDataset(
        data['trainIm'], data['trainAnnot'], transform=trainDataset_scale4),
                                                     batch_size=batch_size + 4,
                                                     shuffle=True,
                                                     num_workers=num_work,
                                                     pin_memory=True)

    valLoader = torch.utils.data.DataLoader(myDataLoader.MyAuxDataset(
        data['valIm'], data['valAnnot'], transform=valDataset),
                                            batch_size=batch_size - 2,
                                            shuffle=False,
                                            num_workers=num_work,
                                            pin_memory=True)

    return trainLoader, trainLoader_scale1, trainLoader_scale2, trainLoader_scale3, trainLoader_scale4, valLoader, data
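One hedged way to consume the five training loaders: rotate through the resolutions epoch by epoch so the network sees every scale. Whether the original training script schedules scales this way is an assumption, and the arguments are placeholders:

(trainLoader, trainLoader_scale1, trainLoader_scale2, trainLoader_scale3,
 trainLoader_scale4, valLoader, data) = cityCVaux_dataloader(
     'city.p', './cityscapes', classes=19, batch_size=8, scaleIn=8)

scale_loaders = [trainLoader, trainLoader_scale1, trainLoader_scale2,
                 trainLoader_scale3, trainLoader_scale4]
for epoch in range(300):                                # placeholder epoch count
    loader = scale_loaders[epoch % len(scale_loaders)]  # rotate resolutions
    for batch in loader:
        pass  # training step goes here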
Example #4
def portrait_multiCVdataloader(cached_data_file,
                               data_dir,
                               classes,
                               batch_size,
                               scaleIn,
                               w=180,
                               h=320,
                               edge=False,
                               num_work=4,
                               Enc=True,
                               Augset=True):

    if not os.path.isfile(cached_data_file):
        if Augset:
            additional_data = ['/Nukki/baidu_V1/', '/Nukki/baidu_V2/']

            dataLoad = ld.LoadData(data_dir,
                                   classes,
                                   cached_data_file,
                                   additional=additional_data)
            data = dataLoad.processDataAug()
        else:
            dataLoad = ld.LoadData(data_dir, classes, cached_data_file)
            data = dataLoad.processData()

        if data is None:
            print('Error while pickling data. Please check.')
            exit(-1)
    else:
        with open(cached_data_file, "rb") as f:
            data = pickle.load(f)

    trainDataset_main = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(w, h),
        cvTransforms.RandomCropResize(32),
        cvTransforms.RandomFlip(),
        cvTransforms.ToTensor(scaleIn),
    ])

    trainDataset_main2 = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(224, 224),
        cvTransforms.RandomCropResize(16),
        cvTransforms.RandomFlip(),
        cvTransforms.ToTensor(scaleIn),
    ])

    trainDataset_main3 = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(int(w * 0.8), int(h * 0.8)),
        cvTransforms.RandomCropResize(24),
        cvTransforms.RandomFlip(),
        cvTransforms.ToTensor(scaleIn),
    ])

    valDataset = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(224, 224),
        cvTransforms.ToTensor(scaleIn),
    ])

    print("This stage is Enc" + str(Enc))
    print(" Load public Baidu train dataset")
    trainLoader = torch.utils.data.DataLoader(myDataLoader.CVDataset(
        data['trainIm'],
        data['trainAnnot'],
        transform=trainDataset_main,
        edge=edge,
        Enc=Enc),
                                              batch_size=batch_size,
                                              shuffle=True,
                                              num_workers=num_work,
                                              pin_memory=True)

    trainLoader2 = torch.utils.data.DataLoader(
        myDataLoader.CVDataset(data['trainIm'],
                               data['trainAnnot'],
                               transform=trainDataset_main2,
                               edge=edge,
                               Enc=Enc),
        batch_size=int(1.5 * batch_size),
        shuffle=True,
        num_workers=num_work,
        pin_memory=True)

    trainLoader3 = torch.utils.data.DataLoader(
        myDataLoader.CVDataset(data['trainIm'],
                               data['trainAnnot'],
                               transform=trainDataset_main3,
                               edge=edge,
                               Enc=Enc),
        batch_size=int(1.8 * batch_size),
        shuffle=True,
        num_workers=num_work,
        pin_memory=True)

    print(" Load public val dataset")

    valLoader = torch.utils.data.DataLoader(myDataLoader.CVDataset(
        data['valIm'],
        data['valAnnot'],
        transform=valDataset,
        edge=True,
        Enc=Enc),
                                            batch_size=batch_size,
                                            shuffle=False,
                                            num_workers=num_work,
                                            pin_memory=True)

    return trainLoader, trainLoader2, trainLoader3, valLoader, data
Example #5
def portraitPIL_Doublerandscalecrop(cached_data_file,
                                    data_dir,
                                    classes,
                                    batch_size,
                                    scale=(0.8, 1.0),
                                    size=(1024, 512),
                                    scale1=1,
                                    scale2=2,
                                    ignore_idx=255,
                                    edge=False,
                                    num_work=6,
                                    Augset=True):

    print("This input size is  " + str(size))

    if not os.path.isfile(cached_data_file):
        if Augset:
            additional_data = ['/Nukki/baidu_V1/', '/Nukki/baidu_V2/']

            dataLoad = ld.LoadData(data_dir,
                                   classes,
                                   cached_data_file,
                                   additional=additional_data)
            data = dataLoad.processDataAug()
        else:
            dataLoad = ld.LoadData(data_dir, classes, cached_data_file)
            data = dataLoad.processData()

        if data is None:
            print('Error while pickling data. Please check.')
            exit(-1)
    else:
        with open(cached_data_file, "rb") as f:
            data = pickle.load(f)

    if not isinstance(size, tuple):
        size = (size, size)

    if not isinstance(scale, tuple):
        scale = (scale, scale)

    train_transforms = pilTransforms.Compose([
        pilTransforms.RandomScale(scale=scale),
        pilTransforms.RandomCrop(crop_size=size, ignore_idx=ignore_idx),
        pilTransforms.RandomFlip(),
        pilTransforms.DoubleNormalize(scale1=scale1, scale2=scale2)
    ])
    val_transforms = pilTransforms.Compose([
        pilTransforms.Resize(size=size),
        pilTransforms.DoubleNormalize(scale1=scale2, scale2=1)
    ])
    trainLoader = torch.utils.data.DataLoader(myDataLoader.PILDataset(
        data['trainIm'],
        data['trainAnnot'],
        Double=True,
        ignore_idx=ignore_idx,
        edge=edge,
        transform=train_transforms),
                                              batch_size=batch_size,
                                              shuffle=True,
                                              num_workers=num_work,
                                              pin_memory=True)

    valLoader = torch.utils.data.DataLoader(myDataLoader.PILDataset(
        data['valIm'],
        data['valAnnot'],
        Double=True,
        ignore_idx=ignore_idx,
        edge=True,
        transform=val_transforms),
                                            batch_size=batch_size,
                                            shuffle=False,
                                            num_workers=num_work,
                                            pin_memory=True)

    return trainLoader, valLoader, data
Example #6
def portrait_CVdataloader(cached_data_file,
                          data_dir,
                          classes,
                          batch_size,
                          scaleIn,
                          w=180,
                          h=320,
                          edge=False,
                          num_work=4,
                          Enc=True,
                          Augset=True):

    if not os.path.isfile(cached_data_file):
        if Augset:
            additional_data = ['/Nukki/baidu_V1/', '/Nukki/baidu_V2/']

            dataLoad = ld.LoadData(data_dir,
                                   classes,
                                   cached_data_file,
                                   additional=additional_data)
            data = dataLoad.processDataAug()
        else:
            dataLoad = ld.LoadData(data_dir, classes, cached_data_file)
            data = dataLoad.processData()

        if data is None:
            print('Error while pickling data. Please check.')
            exit(-1)
    else:
        with open(cached_data_file, "rb") as f:
            data = pickle.load(f)

    trainDataset_main = cvTransforms.Compose([
        cvTransforms.Translation(w, h),
        cvTransforms.data_aug_color(),
        cvTransforms.data_aug_blur(),
        cvTransforms.data_aug_noise(),
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(w, h),
        cvTransforms.ToTensor(scaleIn),
    ])

    valDataset = cvTransforms.Compose([
        cvTransforms.Normalize(mean=data['mean'], std=data['std']),
        cvTransforms.Scale(w, h),
        cvTransforms.ToTensor(scaleIn),
    ])

    print("This stage is Enc" + str(Enc))
    print(" Load public Baidu train dataset")
    trainLoader = torch.utils.data.DataLoader(myDataLoader.CVDataset(
        data['trainIm'],
        data['trainAnnot'],
        transform=trainDataset_main,
        edge=edge,
        Enc=Enc),
                                              batch_size=batch_size,
                                              shuffle=True,
                                              num_workers=num_work,
                                              pin_memory=True)

    print(" Load public val dataset")

    valLoader = torch.utils.data.DataLoader(myDataLoader.CVDataset(
        data['valIm'],
        data['valAnnot'],
        transform=valDataset,
        edge=True,
        Enc=Enc),
                                            batch_size=batch_size,
                                            shuffle=False,
                                            num_workers=num_work,
                                            pin_memory=True)

    return trainLoader, valLoader, data