Example #1
0
def load_data(root, num_seen, batch_size, num_workers):
    """
    Load cifar10 dataset split into query/seen/unseen/retrieval sets.

    Args
        root(str): Path of dataset.
        num_seen(int): Number of seen classes.
        batch_size(int): Batch size.
        num_workers(int): Number of loading data threads.

    Returns
        query_dataloader, seen_dataloader, unseen_dataloader, retrieval_dataloader(torch.utils.data.DataLoader): Data loader.
    """
    # Partition the dataset by number of seen classes before building splits.
    CIFAR10.init(root, num_seen)
    query_dataset = CIFAR10('query', transform=query_transform())
    seen_dataset = CIFAR10('seen', transform=train_transform())
    unseen_dataset = CIFAR10('unseen', transform=train_transform())
    retrieval_dataset = CIFAR10('retrieval', transform=train_transform())

    # Query loader keeps natural order (no shuffle) so evaluation is stable;
    # the three training-style splits below are shuffled.
    query_dataloader = DataLoader(
        query_dataset,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )

    seen_dataloader = DataLoader(
        seen_dataset,
        shuffle=True,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )

    unseen_dataloader = DataLoader(
        unseen_dataset,
        shuffle=True,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )

    retrieval_dataloader = DataLoader(
        retrieval_dataset,
        shuffle=True,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )

    return query_dataloader, seen_dataloader, unseen_dataloader, retrieval_dataloader
Example #2
0
def load_data(root, batch_size, num_workers):
    """
    Build query/train/retrieval DataLoaders for the NABirds dataset.

    Args
        root(str): Path of dataset.
        batch_size(int): Batch size.
        num_workers(int): Number of loading data threads.

    Returns
        query_dataloader, train_dataloader, retrieval_dataloader(torch.utils.data.DataLoader): Data loader.
    """
    NABirds.init(root)

    def _wrap(dataset, shuffle=False):
        # Shared DataLoader configuration; only the train split is shuffled.
        return DataLoader(
            dataset,
            batch_size=batch_size,
            shuffle=shuffle,
            pin_memory=True,
            num_workers=num_workers,
        )

    query_dataloader = _wrap(NABirds(root, 'query', query_transform()))
    train_dataloader = _wrap(NABirds(root, 'train', train_transform()), shuffle=True)
    retrieval_dataloader = _wrap(NABirds(root, 'retrieval', query_transform()))

    return query_dataloader, train_dataloader, retrieval_dataloader
Example #3
0
 def __init__(self, data, targets, root, dataset):
     """
     Store the dataset arrays and configuration on the instance.

     Args
         data: Image data.
         targets: Labels for ``data``.
         root: Path of the dataset.
         dataset: Dataset name/identifier.
     """
     self.data = data
     self.targets = targets
     self.root = root
     # Training-time augmentation pipeline is fixed here, not injectable.
     self.transform = train_transform()
     self.dataset = dataset
     # NOTE(review): targets are aliased unchanged — presumably they are
     # already one-hot encoded by the caller; confirm upstream.
     self.onehot_targets = self.targets
Example #4
0
 def __init__(self, data, targets, root, dataset):
     """
     Store the dataset arrays and derive one-hot targets when needed.

     Args
         data: Image data.
         targets: Labels for ``data``.
         root: Path of the dataset.
         dataset: Dataset name; 'cifar-10' triggers one-hot encoding.
     """
     self.data = data
     self.targets = targets
     self.root = root
     # Training-time augmentation pipeline is fixed here, not injectable.
     self.transform = train_transform()
     self.dataset = dataset
     if dataset == 'cifar-10':
         # cifar-10 labels are class indices; expand to 10-way one-hot.
         self.onehot_targets = encode_onehot(self.targets, 10)
     else:
         # Other datasets are assumed to provide one-hot targets already.
         self.onehot_targets = self.targets
Example #5
0
def load_data(root, batch_size, num_workers):
    """
    Load the nus-wide dataset (21 top classes) from list files.

    Args:
        root(str): Path of image files.
        batch_size(int): Batch size.
        num_workers(int): Number of loading data threads.

    Returns
        train_dataloader, query_dataloader, retrieval_dataloader(torch.utils.data.dataloader.DataLoader): Data loader.
    """
    # Construct the three splits first; each pairs an image list file with
    # its one-hot label file.
    query_dataset = NusWideDataset(
        root,
        'test_img.txt',
        'test_label_onehot.txt',
        transform=query_transform(),
    )
    train_dataset = NusWideDataset(
        root,
        'train_img.txt',
        'train_label_onehot_tc21.txt',
        transform=train_transform(),
    )
    retrieval_dataset = NusWideDataset(
        root,
        'database_img.txt',
        'database_label_onehot.txt',
        transform=query_transform(),
    )

    # Only the training loader shuffles; query/retrieval keep file order.
    query_dataloader = DataLoader(
        query_dataset,
        batch_size=batch_size,
        num_workers=num_workers,
        pin_memory=True,
    )
    train_dataloader = DataLoader(
        train_dataset,
        shuffle=True,
        batch_size=batch_size,
        num_workers=num_workers,
        pin_memory=True,
    )
    retrieval_dataloader = DataLoader(
        retrieval_dataset,
        batch_size=batch_size,
        num_workers=num_workers,
        pin_memory=True,
    )

    return train_dataloader, query_dataloader, retrieval_dataloader
Example #6
0
def load_data(root, batch_size, num_workers):
    """
    Load an image-folder dataset stored under ``root/images``.

    NOTE(review): the original docstring said "cifar-10", but the code builds
    ``ImagenetDataset`` splits (train/query/database) with 10-class one-hot
    targets — confirm which dataset this actually serves.

    Args
        root(str): Path of dataset.
        batch_size(int): Batch size.
        num_workers(int): Number of data loading workers.

    Returns
        train_dataloader, query_dataloader, retrieval_dataloader(torch.utils.data.DataLoader): Data loader.
    """
    # All splits live under the 'images' subdirectory of root.
    root = os.path.join(root, 'images')
    train_dataloader = DataLoader(
        ImagenetDataset(
            os.path.join(root, 'train'),
            transform=train_transform(),
            target_transform=Onehot(10),
        ),
        batch_size=batch_size,
        num_workers=num_workers,
        shuffle=True,
        pin_memory=True,
    )

    # Query/database keep directory order (shuffle=False) for evaluation.
    query_dataloader = DataLoader(
        ImagenetDataset(
            os.path.join(root, 'query'),
            transform=query_transform(),
            target_transform=Onehot(10),
        ),
        batch_size=batch_size,
        num_workers=num_workers,
        shuffle=False,
        pin_memory=True,
    )

    retrieval_dataloader = DataLoader(
        ImagenetDataset(
            os.path.join(root, 'database'),
            transform=query_transform(),
            target_transform=Onehot(10),
        ),
        batch_size=batch_size,
        num_workers=num_workers,
        shuffle=False,
        pin_memory=True,
    )

    return train_dataloader, query_dataloader, retrieval_dataloader,
Example #7
0
def load_data(root, num_query, num_train, batch_size, num_workers):
    """
    Load cifar10 dataset.

    Args
        root(str): Path of dataset.
        num_query(int): Number of query data points.
        num_train(int): Number of training data points.
        batch_size(int): Batch size.
        num_workers(int): Number of loading data threads.

    Returns
        query_dataloader, train_dataloader, retrieval_dataloader(torch.utils.data.DataLoader): Data loader.
    """
    # Split the dataset into num_query/num_train subsets before use.
    CIFAR10.init(root, num_query, num_train)
    # Query and database targets are one-hot encoded; training targets
    # are left as raw class indices (target_transform=None).
    query_dataset = CIFAR10('query',
                            transform=query_transform(),
                            target_transform=Onehot())
    train_dataset = CIFAR10('train',
                            transform=train_transform(),
                            target_transform=None)
    retrieval_dataset = CIFAR10('database',
                                transform=query_transform(),
                                target_transform=Onehot())

    # Only the training loader shuffles; query/retrieval keep fixed order.
    query_dataloader = DataLoader(
        query_dataset,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )
    train_dataloader = DataLoader(
        train_dataset,
        shuffle=True,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )
    retrieval_dataloader = DataLoader(
        retrieval_dataset,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )

    return query_dataloader, train_dataloader, retrieval_dataloader
Example #8
0
def load_data(root, num_query, num_train, batch_size, num_workers):
    """
    Loading Flickr25k dataset.

    NOTE(review): original docstring said "nus-wide", but the code uses the
    Flickr25k dataset class throughout.

    Args:
        root(str): Path of image files.
        num_query(int): Number of query data.
        num_train(int): Number of training data.
        batch_size(int): Batch size.
        num_workers(int): Number of loading data threads.

    Returns
        query_dataloader, train_dataloader, retrieval_dataloader (torch.utils.data.DataLoader): Data loader.
    """

    # Sample query/train subsets before constructing the splits.
    Flickr25k.init(root, num_query, num_train)
    query_dataset = Flickr25k(root, 'query', query_transform())
    train_dataset = Flickr25k(root, 'train', train_transform())
    retrieval_dataset = Flickr25k(root, 'retrieval', query_transform())

    # Only the training loader shuffles; query/retrieval keep fixed order.
    query_dataloader = DataLoader(
        query_dataset,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )
    train_dataloader = DataLoader(
        train_dataset,
        batch_size=batch_size,
        shuffle=True,
        pin_memory=True,
        num_workers=num_workers,
    )
    retrieval_dataloader = DataLoader(
        retrieval_dataset,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )

    return query_dataloader, train_dataloader, retrieval_dataloader
Example #9
0
def load_data(root, batch_size, num_workers, sampler=None):
    """
    Build query/train/retrieval DataLoaders for the Cub2011 dataset.

    Args
        root(str): Path of dataset.
        batch_size(int): Batch size.
        num_workers(int): Number of loading data threads.
        sampler(str, optional): 'PK' selects a P-classes-by-K-samples
            batch sampler for training; any other value uses plain shuffling.

    Returns
        query_dataloader, train_dataloader, retrieval_dataloader(torch.utils.data.DataLoader): Data loader.
    """
    Cub2011.init(root)
    query_dataset = Cub2011(root, 'query', query_transform())
    train_dataset = Cub2011(root, 'train', train_transform())
    retrieval_dataset = Cub2011(root, 'retrieval', query_transform())

    # Options shared by every loader below.
    common = dict(pin_memory=True, num_workers=num_workers)

    query_dataloader = DataLoader(query_dataset, batch_size=batch_size, **common)

    if sampler == 'PK':
        # PK sampling: each batch holds p classes with k samples per class,
        # so the effective batch size is p * k.
        p, k = 16, 5
        train_dataloader = DataLoader(
            train_dataset,
            batch_size=p * k,
            sampler=PKSampler2(train_dataset, p, k),
            **common,
        )
    else:
        train_dataloader = DataLoader(
            train_dataset,
            batch_size=batch_size,
            shuffle=True,
            **common,
        )

    retrieval_dataloader = DataLoader(retrieval_dataset, batch_size=batch_size, **common)

    return query_dataloader, train_dataloader, retrieval_dataloader
Example #10
0
def load_data(
    tc,
    root,
    num_query,
    num_train,
    batch_size,
    num_workers,
):
    """
    Loading nus-wide dataset.

    Args:
        tc(int): Top class count; must be 21 or 10.
        root(str): Path of image files.
        num_query(int): Number of query data.
        num_train(int): Number of training data.
        batch_size(int): Batch size.
        num_workers(int): Number of loading data threads.

    Returns
        query_dataloader, train_dataloader, retrieval_dataloader(torch.utils.data.DataLoader): Data loader.

    Raises
        ValueError: If ``tc`` is neither 21 nor 10.
    """
    if tc == 21:
        query_dataset = NusWideDatasetTC21(
            root,
            'test_img.txt',
            'test_label_onehot.txt',
            transform=query_transform(),
        )

        # The training split is drawn from the database list (train=True
        # limits it to num_train samples).
        train_dataset = NusWideDatasetTC21(
            root,
            'database_img.txt',
            'database_label_onehot.txt',
            transform=train_transform(),
            train=True,
            num_train=num_train,
        )

        retrieval_dataset = NusWideDatasetTC21(
            root,
            'database_img.txt',
            'database_label_onehot.txt',
            transform=query_transform(),
        )
    elif tc == 10:
        NusWideDatasetTc10.init(root, num_query, num_train)
        query_dataset = NusWideDatasetTc10(root, 'query', query_transform())
        train_dataset = NusWideDatasetTc10(root, 'train', train_transform())
        retrieval_dataset = NusWideDatasetTc10(root, 'retrieval',
                                               query_transform())
    else:
        # Previously an unsupported tc fell through and crashed later with
        # NameError; fail fast with a clear message instead.
        raise ValueError(f'Invalid top class count: {tc!r} (expected 21 or 10)')

    # Only the training loader shuffles; query/retrieval keep fixed order.
    query_dataloader = DataLoader(
        query_dataset,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )
    train_dataloader = DataLoader(
        train_dataset,
        batch_size=batch_size,
        shuffle=True,
        pin_memory=True,
        num_workers=num_workers,
    )
    retrieval_dataloader = DataLoader(
        retrieval_dataset,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )

    return query_dataloader, train_dataloader, retrieval_dataloader
def load_data(root, num_seen, batch_size, num_workers):
    """
    Loading nus-wide dataset.

    Args:
        root(str): Path of image files.
        num_seen(int): Number of classes of seen.
        batch_size(int): Batch size.
        num_workers(int): Number of loading data threads.

    Returns
       query_dataloader, seen_dataloader, unseen_dataloader, retrieval_dataloader(torch.utils.data.DataLoader): Data loader.
    """
    # Partition the dataset by number of seen classes before building splits.
    NusWideDatasetTC21.init(root, num_seen)
    query_dataset = NusWideDatasetTC21(
        root,
        'query',
        transform=query_transform(),
    )

    retrieval_dataset = NusWideDatasetTC21(
        root,
        'retrieval',
        transform=train_transform(),
    )

    unseen_dataset = NusWideDatasetTC21(
        root,
        'unseen',
        transform=train_transform(),
    )

    seen_dataset = NusWideDatasetTC21(
        root,
        'seen',
        transform=train_transform(),
    )

    # Query loader keeps fixed order; the other splits are shuffled.
    query_dataloader = DataLoader(
        query_dataset,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )

    retrieval_dataloader = DataLoader(
        retrieval_dataset,
        shuffle=True,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )

    unseen_dataloader = DataLoader(
        unseen_dataset,
        shuffle=True,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )
    seen_dataloader = DataLoader(
        seen_dataset,
        shuffle=True,
        batch_size=batch_size,
        pin_memory=True,
        num_workers=num_workers,
    )

    return query_dataloader, seen_dataloader, unseen_dataloader, retrieval_dataloader