예제 #1
0
def test_dataloader(args):
    """Build a non-shuffled DataLoader over the test split for ``args.region``.

    The HDF5 test file is located under ``args.locations["train_test_datadir"]``
    and wrapped in a ``data_io.ConcatDataset`` configured from ``args``
    (variable lists, data fraction, normaliser location; advection disabled).

    Parameters
    ----------
    args : object
        Expected to expose ``locations``, ``region``, ``nlevs``, ``xvars``,
        ``yvars``, ``yvars2``, ``data_fraction`` and ``batch_size``.

    Returns
    -------
    torch.utils.data.DataLoader
        Loader over the test dataset with ``shuffle=False``.
    """
    dataset_path = "{0}/test_data_{1}.hdf5".format(
        args.locations["train_test_datadir"], args.region)
    test_dataset = data_io.ConcatDataset(
        "test",
        args.nlevs,
        dataset_path,
        args.locations['normaliser_loc'],
        xvars=args.xvars,
        yvars=args.yvars,
        yvars2=args.yvars2,
        data_frac=args.data_fraction,
        add_adv=False,
    )
    return torch.utils.data.DataLoader(
        test_dataset,
        batch_size=args.batch_size,
        shuffle=False,
    )
예제 #2
0
 def test_dataloader(self):
     """Return a non-shuffled DataLoader over the test split.

     NOTE(review): ``locations``, ``region``, ``nlevs`` and ``batch_size``
     are resolved as free (module-level) names here, not ``self``
     attributes — confirm they are defined in the enclosing module.
     The dataset is opened with ``overfit=True``, so this presumably
     serves an overfitting sanity-check configuration.
     """
     dataset_path = "{0}/test_data_{1}.hdf5".format(
         locations["train_test_datadir"], region)
     test_dataset = data_io.ConcatDataset(
         "test", nlevs, dataset_path, overfit=True)
     return torch.utils.data.DataLoader(
         test_dataset,
         batch_size=batch_size,
         shuffle=False,
     )
예제 #3
0
def train_dataloader(args):
    """Build a shuffled DataLoader over the training split for ``args.region``.

    The HDF5 training file is located under
    ``args.locations["train_test_datadir"]`` and wrapped in a
    ``data_io.ConcatDataset``. The dataset object itself is returned
    alongside the loader so callers can access it directly.

    Parameters
    ----------
    args : object
        Expected to expose ``locations``, ``region``, ``nlevs``,
        ``data_fraction`` and ``batch_size``.

    Returns
    -------
    tuple
        ``(train_loader, dataset)`` where ``train_loader`` is a
        ``torch.utils.data.DataLoader`` with ``shuffle=True``.
    """
    dataset_path = "{0}/train_data_{1}.hdf5".format(
        args.locations["train_test_datadir"], args.region)
    dataset = data_io.ConcatDataset(
        "train",
        args.nlevs,
        dataset_path,
        args.locations['normaliser_loc'],
        data_frac=args.data_fraction,
    )
    loader = torch.utils.data.DataLoader(
        dataset,
        batch_size=args.batch_size,
        shuffle=True,
    )
    return loader, dataset