def test_dataloader_batched(args):
    """Build a DataLoader over the pre-batched test dataset for ``args.region``.

    Args:
        args: Namespace-like config. Reads ``locations`` (dict with
            ``train_test_datadir`` and ``normaliser_loc``), ``region``,
            ``nlevs``, ``batch_size``, ``xvars``, ``yvars``, ``yvars2``,
            ``samples_fraction``, ``data_fraction`` and ``no_norm``.

    Returns:
        torch.utils.data.DataLoader: loader that draws samples in random
        order via ``SubsetRandomSampler``.
    """
    test_dataset_file = (
        f"{args.locations['train_test_datadir']}/test_data_{args.region}.hdf5"
    )
    test_dataset = data_io_batched.ConcatDataset(
        "test",
        args.nlevs,
        test_dataset_file,
        args.locations['normaliser_loc'],
        args.batch_size,
        xvars=args.xvars,
        yvars=args.yvars,
        yvars2=args.yvars2,
        samples_frac=args.samples_fraction,
        data_frac=args.data_fraction,
        no_norm=args.no_norm,
    )
    # Shuffle by sampling all indices in random order. The dataset already
    # yields batches, so automatic batching is disabled with batch_size=None.
    indices = list(range(len(test_dataset)))
    test_sampler = torch.utils.data.SubsetRandomSampler(indices)
    validation_loader = torch.utils.data.DataLoader(
        test_dataset,
        batch_size=None,
        sampler=test_sampler,
    )
    return validation_loader
def train_dataloader(args):
    """Build a DataLoader over the pre-batched training dataset for ``args.region``.

    Args:
        args: Namespace-like config. Reads ``locations`` (dict with
            ``train_test_datadir`` and ``normaliser_loc``), ``region``,
            ``nlevs``, ``batch_size``, ``xvars``, ``yvars``, ``xvars2``,
            ``samples_fraction``, ``data_fraction``, ``no_norm``, ``fmin``
            and ``fmax``.

    Returns:
        torch.utils.data.DataLoader: loader that draws samples in random
        order via ``SubsetRandomSampler``.
    """
    train_dataset_file = (
        f"{args.locations['train_test_datadir']}/train_data_{args.region}.hdf5"
    )
    train_dataset = data_io.ConcatDataset(
        "train",
        args.nlevs,
        train_dataset_file,
        args.locations['normaliser_loc'],
        args.batch_size,
        xvars=args.xvars,
        yvars=args.yvars,
        xvars2=args.xvars2,
        samples_frac=args.samples_fraction,
        data_frac=args.data_fraction,
        no_norm=args.no_norm,
        fmin=args.fmin,
        fmax=args.fmax,
    )
    # Shuffle by sampling all indices in random order. The dataset already
    # yields batches, so automatic batching is disabled with batch_size=None.
    # Note: shuffle must not be set True when a sampler is supplied.
    indices = list(range(len(train_dataset)))
    train_sampler = torch.utils.data.SubsetRandomSampler(indices)
    train_loader = torch.utils.data.DataLoader(
        train_dataset,
        batch_size=None,
        sampler=train_sampler,
    )
    return train_loader