Example #1
    def load_data(self, verbose=False, image_only=False, train_data_rate=0.7):

        print('Start loading image files...')
        if image_only:
            # Image-only mode: glob every file under image_root and split the
            # paths into train/test by train_data_rate.
            paths = glob(os.path.join(args.image_root, '*'))
            print('loaded {} data'.format(len(paths)))

            pivot = int(len(paths) * train_data_rate)
            paths_sep = {'train': paths[:pivot], 'test': paths[pivot:]}
            loader = lambda s: ImageLoader(paths_sep[s],
                                           transform=self.transform[s])

        elif args.one_hot:
            # One-hot mode: the pickle already holds pre-split path lists
            # keyed by 'train' and 'test'.
            sep_data = pd.read_pickle(args.pkl_path)
            loader = lambda s: ClassImageLoader(paths=sep_data[s],
                                                transform=self.transform[s])

        else:
            # Default mode: load a metadata DataFrame, shuffle it and split it
            # into train/test by train_data_rate.
            df = pd.read_pickle(args.pkl_path)
            print('loaded {} data'.format(len(df)))
            pivot = int(len(df) * train_data_rate)
            df_shuffle = df.sample(frac=1)
            df_sep = {'train': df_shuffle[:pivot], 'test': df_shuffle[pivot:]}
            del df, df_shuffle
            loader = lambda s: FlickrDataLoader(args.image_root,
                                                df_sep[s],
                                                self.cols,
                                                transform=self.transform[s])

        train_set = loader('train')
        test_set = loader('test')
        print('train:{} test:{} sets have already loaded.'.format(
            len(train_set), len(test_set)))
        return train_set, test_set
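A rough usage sketch, not part of the original example: assuming an instance of the surrounding class (called `data_module` here, a hypothetical name) and the usual `args` namespace, the returned datasets are typically wrapped in `DataLoader`s like this:

train_set, test_set = data_module.load_data(image_only=True)
train_loader = torch.utils.data.DataLoader(train_set,
                                           batch_size=args.batch_size,
                                           shuffle=True,
                                           num_workers=args.num_workers,
                                           drop_last=True)
test_loader = torch.utils.data.DataLoader(test_set,
                                          batch_size=args.batch_size,
                                          num_workers=args.num_workers)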
Example #2
import pandas as pd
import torch
from torchvision import transforms

from cunet import Conditional_UNet
# ClassImageLoader and the parsed `args` namespace are defined elsewhere in the project.

if __name__ == '__main__':
    transform = transforms.Compose([
        transforms.Resize((args.input_size, ) * 2),
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
    ])

    # Weather condition labels used by the dataset.
    s_li = ['sunny', 'cloudy', 'rain', 'snow', 'foggy']
    sep_data = pd.read_pickle(args.pkl_path)
    sep_data = sep_data['test']
    # sep_data = [p for p in sep_data if 'foggy' in p]
    print('loaded {} data'.format(len(sep_data)))

    dataset = ClassImageLoader(paths=sep_data, transform=transform, inf=True)

    loader = torch.utils.data.DataLoader(dataset,
                                         batch_size=args.batch_size,
                                         num_workers=args.num_workers,
                                         drop_last=True)
    # Shuffled loader used to draw random condition batches; without
    # shuffle=True it would yield exactly the same batches as `loader`.
    random_loader = torch.utils.data.DataLoader(dataset,
                                                batch_size=args.batch_size,
                                                num_workers=args.num_workers,
                                                shuffle=True,
                                                drop_last=True)

    # load model
    transfer = Conditional_UNet(num_classes=args.num_classes)
    sd = torch.load(args.cp_path)
    transfer.load_state_dict(sd['inference'])
    transfer.eval()
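If the checkpoint at `args.cp_path` was saved on a GPU machine, loading it on a CPU-only box needs an explicit `map_location`; a small variant of the loading step above, assuming the same checkpoint keys:

    sd = torch.load(args.cp_path, map_location=torch.device('cpu'))
    transfer.load_state_dict(sd['inference'])
    transfer.eval()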
Example #3
    # Collect the indices of each weather class (the class name is the parent
    # directory of each path), then keep the first 91 samples per class so the
    # evaluation set is balanced.
    ind_li = []
    for s in s_li:
        ind_li.append(
            [i for i, c in enumerate(p.split('/')[-2] for p in df) if c == s])
    ind_li = np.concatenate([ind[:91] for ind in ind_li])
    print(ind_li.shape)
    df = [df[i] for i in ind_li]
    print('loaded {} data'.format(len(df)))
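    # Illustration (not from the original file): the same per-class selection
    # on four dummy paths, with a cap of 2 per class instead of 91.
    demo_paths = ['a/sunny/0.jpg', 'a/rain/1.jpg', 'a/sunny/2.jpg', 'a/sunny/3.jpg']
    demo_idx = [[i for i, c in enumerate(p.split('/')[-2] for p in demo_paths) if c == s]
                for s in ['sunny', 'rain']]
    print(np.concatenate([ind[:2] for ind in demo_idx]))  # -> [0 2 1]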

    transform = transforms.Compose([
        transforms.Resize((args.input_size, ) * 2),
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
    ])

    dataset = ClassImageLoader(paths=df, transform=transform)

    loader = torch.utils.data.DataLoader(dataset,
                                         batch_size=args.batch_size,
                                         num_workers=args.num_workers)
    # Shuffled loader for drawing random target batches; shuffle=True keeps it
    # from repeating the same order as `loader`.
    random_loader = torch.utils.data.DataLoader(dataset,
                                                batch_size=args.batch_size,
                                                num_workers=args.num_workers,
                                                shuffle=True)

    # load model
    transfer = Conditional_UNet(num_classes=args.num_classes)
    sd = torch.load(args.cp_path)
    transfer.load_state_dict(sd['inference'])
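    # As in Example #2, the transfer network would normally be switched to
    # eval mode before it is used for inference.
    transfer.eval()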

    classifer = torch.load(args.classifer_path)
    classifer.eval()
Example #4
train_transform = transforms.Compose([
    transforms.Resize((args.input_size, ) * 2),
    transforms.RandomRotation(10),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
])

test_transform = transforms.Compose([
    transforms.Resize((args.input_size, ) * 2),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
])
transform = {'train': train_transform, 'test': test_transform}

# sep_data holds pre-split 'train'/'test' path lists (read from args.pkl_path,
# as in the earlier examples).
loader = lambda s: ClassImageLoader(paths=sep_data[s], transform=transform[s])

train_set = loader('train')
test_set = loader('test')

# ImbalancedDatasetSampler (e.g. the ufoym/imbalanced-dataset-sampler package)
# rebalances batches by oversampling under-represented weather classes.
train_loader = torch.utils.data.DataLoader(
    train_set,
    sampler=ImbalancedDatasetSampler(train_set),
    batch_size=args.batch_size,
    drop_last=True,
    num_workers=4)

test_loader = torch.utils.data.DataLoader(
    test_set,
    # sampler=ImbalancedDatasetSampler(test_set),
    batch_size=args.batch_size)
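The excerpt stops with the loaders; a generic sketch of the epoch loop they would feed, assuming a model `net`, a loss `criterion`, an `optimizer`, and an epoch count `args.num_epoch` are defined elsewhere (all of these names are hypothetical, not taken from the example):

for epoch in range(args.num_epoch):
    net.train()
    for img, label in train_loader:
        optimizer.zero_grad()
        loss = criterion(net(img), label)   # assumes integer class labels
        loss.backward()
        optimizer.step()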