Example #1
    def _prepare_data(self):

        params = self.params

        train_set = mdl.get_dataset("MNIST", split="train")
        valid_set = mdl.get_dataset("MNIST", split="valid")

        def get_loader(dataset):
            # Build a shuffled DataLoader over the split, dropping the last
            # partial batch so every batch has the same size.
            loader = torch.utils.data.DataLoader(
                dataset,
                batch_size=params.batch_size,
                drop_last=True,
                shuffle=True,
                num_workers=4,
            )
            return loader

        train_loader = get_loader(train_set)
        valid_loader = get_loader(valid_set)

        params.len_train_batch = len(train_loader)
        params.len_test_batch = len(valid_loader)

        iters = {
            "train": ELoaderIter(train_loader),
            "valid": ELoaderIter(valid_loader),
        }

        return None, iters
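Every example on this page wraps its DataLoader objects in an ELoaderIter before returning them, but that class is not shown. As a rough sketch, assuming it simply yields batches endlessly by restarting the underlying loader whenever it is exhausted (the class name comes from the examples; its behaviour here is an assumption):

class ELoaderIter:
    # Hypothetical endless wrapper around a torch DataLoader (assumed behaviour).
    def __init__(self, loader):
        self.loader = loader
        self.it = iter(loader)

    def next(self):
        # Restart the loader once it runs out of batches so a training
        # loop can keep requesting data without tracking epoch boundaries.
        try:
            return next(self.it)
        except StopIteration:
            self.it = iter(self.loader)
            return next(self.it)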
Example #2
    def _prepare_data(self):
        '''
            Prepare your dataset here
            and return an iterator dict.
        '''
        params = self.params

        train_set = mdl.get_dataset("MNIST", split="train")
        valid_set = mdl.get_dataset("MNIST", split="valid")

        def get_loader(dataset):
            # Build a shuffled DataLoader over the split, dropping the last
            # partial batch so every batch has the same size.
            loader = torch.utils.data.DataLoader(
                dataset,
                batch_size=params.batch_size,
                drop_last=True,
                shuffle=True,
                num_workers=4,
            )
            return loader

        train_loader = get_loader(train_set)
        valid_loader = get_loader(valid_set)

        iters = {
            "train": ELoaderIter(train_loader),
            "valid": ELoaderIter(valid_loader),
        }

        return None, iters
Example #3
def get_set(dataset, domain, split):
    # If no dataset name is given, fall back to using the domain
    # name itself as the dataset to load.
    if dataset is None or dataset == "NONE":
        dataset = mdl.get_dataset(
            dataset=domain, domain=None, split=split
        )
    else:
        dataset = mdl.get_dataset(
            dataset=dataset, domain=domain, split=split
        )
    return dataset
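For illustration, the two branches behave as follows (the dataset and domain strings below are placeholders, not names taken from the original project):

named = get_set("office31", "amazon", split="train")   # explicit dataset plus domain
by_domain = get_set("NONE", "MNIST", split="train")    # domain name doubles as the dataset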
Example #4
    def _prepare_data(self):

        params = self.params

        dataset = params.dataset
        source = params.source
        target = params.target

        def get_set(dataset, domain, split):
            if dataset is None or dataset == "NONE":
                dataset = mdl.get_dataset(
                    dataset=domain, domain=None, split=split
                )
            else:
                dataset = mdl.get_dataset(
                    dataset=dataset, domain=domain, split=split
                )
            return dataset

        train_S_set = get_set(dataset, source, split="train")
        train_T_set = get_set(dataset, target, split="train")
        valid_set = get_set(dataset, target, split="test")

        def get_loader(dataset, shuffle, drop_last, batch_size=None):
            # Fall back to the configured batch size unless one is given explicitly.
            batch_size = (
                params.batch_size if batch_size is None else batch_size
            )
            loader = torch.utils.data.DataLoader(
                dataset,
                batch_size=batch_size,
                drop_last=drop_last,
                shuffle=shuffle,
            )
            return loader

        train_S_l = get_loader(train_S_set, shuffle=True, drop_last=True)
        train_T_l = get_loader(train_T_set, shuffle=True, drop_last=True)
        valid_l = get_loader(
            valid_set,
            shuffle=True,
            drop_last=True,
            batch_size=params.batch_size // 2,
        )

        iters = {
            "train": {
                "S": ELoaderIter(train_S_l),
                "T": ELoaderIter(train_T_l),
            },
            "valid": ELoaderIter(valid_l),
        }

        return None, iters
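All of these _prepare_data methods return (None, iters); presumably the surrounding framework stores iters and pulls batches from it during training. A hedged sketch of how the Example #4 dictionary might be consumed, assuming ELoaderIter exposes a next() method as in the sketch after Example #1:

# Hypothetical training-step usage of the returned iterator dict
src_imgs, src_labels = iters["train"]["S"].next()
tgt_imgs, _ = iters["train"]["T"].next()
val_imgs, val_labels = iters["valid"].next()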