def initialize(self, source, target, batch_size_source, batch_size_target, scale=32):
        transform = transforms.Compose([
            transforms.Resize(scale),
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
        ])

        dataset_source = []
        dataloader_source = []

        self.max_len = 0
        for i in range(len(source)):
            dataset_source.append(Dataset(source[i]['imgs'], source[i]['labels'], transform=transform))
            # track the size of the largest source dataset, not the length of the list of datasets
            self.max_len = max(self.max_len, len(dataset_source[i]))
            dataloader_source.append(
                torch.utils.data.DataLoader(dataset_source[i], batch_size=batch_size_source, shuffle=True,
                                            num_workers=4))

        self.dataset_s = dataset_source

        dataset_target = Dataset(target['imgs'], target['labels'], transform=transform)
        self.max_len = max(self.max_len, len(dataset_target))
        dataloader_target = torch.utils.data.DataLoader(dataset_target, batch_size=batch_size_target, shuffle=True,
                                                        num_workers=4)

        self.dataset_t = dataset_target
        self.paired_data = PairedData(dataloader_source, dataloader_target, float("inf"))
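
# A minimal sketch of the PairedData helper the initializers above rely on. Its real
# implementation is not shown in this section, so every detail below is an assumption
# inferred from how it is used: it draws one source batch and one target batch per step,
# restarts whichever loader runs out first, and stops once both loaders have finished an
# epoch or max_dataset_size steps have been taken. When a list of source loaders is
# passed (as in the first example), the same idea would apply per loader.
class PairedDataSketch(object):
    def __init__(self, data_loader_s, data_loader_t, max_dataset_size):
        self.data_loader_s = data_loader_s
        self.data_loader_t = data_loader_t
        self.max_dataset_size = max_dataset_size

    def __iter__(self):
        self.stop_s, self.stop_t = False, False
        self.iter_s = iter(self.data_loader_s)
        self.iter_t = iter(self.data_loader_t)
        self.steps = 0
        return self

    def __next__(self):
        try:
            img_s, label_s = next(self.iter_s)
        except StopIteration:
            # source loader exhausted: mark it and start a fresh pass
            self.stop_s = True
            self.iter_s = iter(self.data_loader_s)
            img_s, label_s = next(self.iter_s)
        try:
            img_t, label_t = next(self.iter_t)
        except StopIteration:
            # target loader exhausted: mark it and start a fresh pass
            self.stop_t = True
            self.iter_t = iter(self.data_loader_t)
            img_t, label_t = next(self.iter_t)
        self.steps += 1
        if (self.stop_s and self.stop_t) or self.steps > self.max_dataset_size:
            raise StopIteration
        return {'S': img_s, 'S_label': label_s, 'T': img_t, 'T_label': label_t}
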
    def initialize(self, source, target, batch_size1, batch_size2, scale=32):
        transform = transforms.Compose([
            transforms.Resize(scale),
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
        ])
        dataset_source1 = Dataset(source[0]['imgs'], source[0]['labels'], transform=transform)
        data_loader_s1 = torch.utils.data.DataLoader(dataset_source1, batch_size=batch_size1, shuffle=True, num_workers=4)
        self.dataset_s1 = dataset_source1

        dataset_source2 = Dataset(source[1]['imgs'], source[1]['labels'], transform=transform)
        data_loader_s2 = torch.utils.data.DataLoader(dataset_source2, batch_size=batch_size1, shuffle=True, num_workers=4)
        self.dataset_s2 = dataset_source2

        dataset_source3 = Dataset(source[2]['imgs'], source[2]['labels'], transform=transform)
        data_loader_s3 = torch.utils.data.DataLoader(dataset_source3, batch_size=batch_size1, shuffle=True, num_workers=4)  
        self.dataset_s3 = dataset_source3      

        dataset_source4 = Dataset(source[3]['imgs'], source[3]['labels'], transform=transform)
        data_loader_s4 = torch.utils.data.DataLoader(dataset_source4, batch_size=batch_size1, shuffle=True, num_workers=4)  
        self.dataset_s4 = dataset_source4     

        dataset_target = Dataset(target['imgs'], target['labels'], transform=transform)
        data_loader_t = torch.utils.data.DataLoader(dataset_target, batch_size=batch_size2, shuffle=True, num_workers=4)

        self.dataset_t = dataset_target
        self.paired_data = CombinedData(data_loader_s1, data_loader_s2, data_loader_s3, data_loader_s4,
                                        data_loader_t, float("inf"))

        self.num_datasets = 4
        # largest domain size times the number of source domains; the min with inf applies an optional cap
        self.num_samples = min(max(len(self.dataset_s1), len(self.dataset_s2), len(self.dataset_s3),
                                   len(self.dataset_s4), len(self.dataset_t)),
                               float("inf")) * self.num_datasets
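
# A hedged sketch of the input layout the four-source initializer above appears to
# expect: `source` is a list of four dicts and `target` a single dict, each holding
# 'imgs' and 'labels'. The dummy shapes, dtypes, and sizes below are illustrative
# assumptions only, not values taken from any real dataset.
import numpy as np

source = [{'imgs': np.random.randint(0, 256, (200, 32, 32, 3), dtype=np.uint8),
           'labels': np.random.randint(0, 10, (200,))} for _ in range(4)]
target = {'imgs': np.random.randint(0, 256, (150, 32, 32, 3), dtype=np.uint8),
          'labels': np.random.randint(0, 10, (150,))}
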
    def __init__(self, source, target, bs_source, bs_target, scale=32):

        transform = transforms.Compose([
            transforms.Resize(scale),
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
        ])

        self.dataset_s = Dataset(source['imgs'], source['labels'], transform=transform)
        self.dataset_t = Dataset(target['imgs'], target['labels'], transform=transform)

        loader_s = torch.utils.data.DataLoader(
            self.dataset_s, batch_size=bs_source,
            shuffle=True, num_workers=4)

        loader_t = torch.utils.data.DataLoader(
            self.dataset_t, batch_size=bs_target,
            shuffle=True, num_workers=4)

        self.paired_data = PairedData(loader_s, loader_t, float("inf"))
Example #4
    def initialize(self, source, target, batch_size1, batch_size2, scale=32):
        transform = transforms.Compose([
            transforms.Resize(scale),
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
        ])
        dataset_source = Dataset(source['imgs'], source['labels'], transform=transform)
        dataset_target = Dataset(target['imgs'], target['labels'], transform=transform)
        data_loader_s = torch.utils.data.DataLoader(
            dataset_source,
            batch_size=batch_size1,
            shuffle=True,
            num_workers=4)

        data_loader_t = torch.utils.data.DataLoader(
            dataset_target,
            batch_size=batch_size2,
            shuffle=True,
            num_workers=4)
        self.dataset_s = dataset_source
        self.dataset_t = dataset_target
        self.paired_data = PairedData(data_loader_s, data_loader_t,
                                      float("inf"))