Example #1
0
    def wrap_dataset(self, dset, savedir, args):
        """Save the dataset to disk, reload it, and wrap it in a DataLoader.

        Signals within a batch may have uneven lengths, which breaks the
        default collation; a custom collate function zero-pads each batch
        to its longest signal and also returns the original lengths.

        Args:
            dset: dataset to persist via ``save_bball_data``.
            savedir: directory the data is saved to and reloaded from.
            args: namespace with ``override_data``, ``target``,
                ``batch_size`` and ``num_workers`` attributes.

        Returns:
            A shuffled ``DataLoader`` whose batches are
            ``[data, target, lens]`` — padded float tensor, float targets,
            and the pre-padding lengths as a LongTensor.
        """
        # save the data
        save_bball_data(dset, savedir, args.override_data)

        dset = load_bball_data(savedir, args.target)

        # Deal with uneven lengths breaking the default DataLoader collation.
        def pad_data(x):
            # Zero-pad every signal in every sample to the batch max length.
            # NOTE(review): max length is taken from sig[0] of each sample —
            # assumes all signals within a sample share that length; confirm.
            max_len = max(len(sig[0]) for sig in x)
            return [
                [
                    [float(v) for v in np.append(datum, np.zeros(max_len - len(datum)))]
                    for datum in data
                ]
                for data in x
            ]

        def my_collate(batch):
            # Each batch item is a (signals, target) pair.
            data = [item[0] for item in batch]
            # Record original (pre-padding) lengths so downstream code can
            # mask or pack the padded sequences.
            lengths = [len(d[0]) for d in data]
            data = torch.stack(
                [torch.Tensor(x[0]).float() for x in pad_data(data)])
            target = torch.FloatTensor([item[1] for item in batch])
            lens = torch.LongTensor(lengths)
            return [data, target, lens]

        # BUG FIX: `batch_first` is not a DataLoader argument (it belongs to
        # nn.RNN / pad_sequence); passing it raised TypeError. Removed.
        return DataLoader(dset,
                          batch_size=args.batch_size,
                          collate_fn=my_collate,
                          shuffle=True,
                          num_workers=args.num_workers)
Example #2
0
    def wrap_dataset(self, dset, savedir, args):
        """Persist the dataset, reload the saved copy, and return a DataLoader.

        Args:
            dset: dataset to persist via ``save_bball_data``.
            savedir: directory the data is written to and read back from.
            args: namespace with ``override_data``, ``target``,
                ``batch_size`` and ``num_workers`` attributes.

        Returns:
            A shuffled ``DataLoader`` over the reloaded dataset.
        """
        # Write the raw data to disk first.
        save_bball_data(dset, savedir, args.override_data)

        # Read it back so the loader operates on the saved copy.
        reloaded = load_bball_data(savedir, args.target)

        loader = DataLoader(
            reloaded,
            batch_size=args.batch_size,
            shuffle=True,
            num_workers=args.num_workers,
        )
        return loader