Example #1
 def train_dataloader(self):
     # Build one DataLoader per distillation task. batch_size=None disables
     # automatic batching, so each task dataset is expected to yield
     # ready-made batches itself.
     multi_dataloader = {
         task: torch.utils.data.DataLoader(task_dataset,
                                           batch_size=None,
                                           num_workers=self.hparams.num_workers,
                                           pin_memory=True)
         for task, task_dataset in distill_datasets.items()
     }
     # Wrap the per-task loaders in a single multi-task loader; tau controls
     # how batches are sampled across tasks.
     return MultiTaskDataloader(tau=self.hparams.tau, **multi_dataloader)
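Example #1 passes batch_size=None, which turns off the DataLoader's automatic batching: each task dataset must already yield complete batches. A minimal sketch of such a dataset, using hypothetical names not taken from the project, could look like this:

 import torch
 from torch.utils.data import DataLoader, IterableDataset

 class PreBatchedDataset(IterableDataset):
     """Hypothetical task dataset that emits ready-made batches."""

     def __init__(self, num_batches=10, batch_size=32, dim=8):
         self.num_batches = num_batches
         self.batch_size = batch_size
         self.dim = dim

     def __iter__(self):
         for _ in range(self.num_batches):
             # Each item is already a full batch of shape (batch_size, dim),
             # so DataLoader(batch_size=None) passes it through unchanged.
             yield torch.randn(self.batch_size, self.dim)

 loader = DataLoader(PreBatchedDataset(), batch_size=None, num_workers=0)
 print(next(iter(loader)).shape)  # torch.Size([32, 8])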
Example #2
 def train_dataloader(self):
     # Build one DataLoader per task from the TRAIN split of its dataset,
     # batching with the shared collate function and hyperparameter batch size.
     multi_dataloader = {
         task: torch.utils.data.DataLoader(task_dataset[datasets.Split.TRAIN],
                                           batch_size=self.hparams.batch_size,
                                           collate_fn=collate,
                                           num_workers=self.hparams.num_workers,
                                           pin_memory=True)
         for task, task_dataset in multi_dataset.items()
     }
     # Combine the per-task loaders into a single multi-task loader keyed by
     # task name.
     return MultiTaskDataloader(tau=self.hparams.tau, **multi_dataloader)
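Both examples hand the per-task DataLoaders to MultiTaskDataloader together with a temperature tau. That class is not shown here; the sketch below is only an assumption of what such a wrapper commonly does, namely drawing each batch from a task sampled with probability proportional to dataset size raised to the power tau. It assumes map-style datasets so that len() is available, and it is not the project's actual implementation.

 import random

 class MultiTaskDataloader:
     """Hypothetical sketch: temperature-based sampling over per-task loaders."""

     def __init__(self, tau=1.0, **dataloaders):
         self.dataloaders = dataloaders
         self.tasks = list(dataloaders)
         # Task sampling probabilities proportional to dataset size ** tau.
         weights = [len(dl.dataset) ** tau for dl in dataloaders.values()]
         total = sum(weights)
         self.probs = [w / total for w in weights]

     def __len__(self):
         # One pass yields as many batches as all per-task loaders hold together.
         return sum(len(dl) for dl in self.dataloaders.values())

     def __iter__(self):
         iters = {task: iter(dl) for task, dl in self.dataloaders.items()}
         for _ in range(len(self)):
             task = random.choices(self.tasks, weights=self.probs, k=1)[0]
             try:
                 batch = next(iters[task])
             except StopIteration:
                 # Restart an exhausted task loader so sampling can continue.
                 iters[task] = iter(self.dataloaders[task])
                 batch = next(iters[task])
             # Yield the task name alongside the batch so the training step
             # can dispatch on it.
             yield task, batch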