def __init__(self, dataset, batch_size=128, shuffle=False, sampler=None,
             batch_sampler=None, num_workers=0, collate_fn=my_collate,
             pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None):
    """DataLoader wrapper whose only customization is defaulting ``collate_fn``
    to ``my_collate``.

    Bug fix: the original passed hard-coded literals to ``DataLoader.__init__``
    (``batch_size=128``, ``shuffle=False``, ``collate_fn=my_collate``, ...),
    silently ignoring every argument the caller supplied. All parameters are
    now forwarded.
    """
    DataLoader.__init__(self, dataset, batch_size=batch_size, shuffle=shuffle,
                        sampler=sampler, batch_sampler=batch_sampler,
                        num_workers=num_workers, collate_fn=collate_fn,
                        pin_memory=pin_memory, drop_last=drop_last,
                        timeout=timeout, worker_init_fn=worker_init_fn)
    # Preserved from the original: keep a direct reference so existing readers
    # of ``self.collate_fn`` keep working (DataLoader also stores it).
    self.collate_fn = collate_fn
def __init__(self, dataset, batch_size, num_workers=0, shuffle=True):
    """Thin DataLoader wrapper that always drops the final partial batch."""
    DataLoader.__init__(
        self,
        dataset,
        batch_size,
        num_workers=num_workers,
        shuffle=shuffle,
        drop_last=True,
    )
def __init__(self, dtset, batch_size, shuffle=False):
    """Constructor.

    Parameters
    ----------
    dtset : a dataset
    batch_size : size of a minibatch; a value <= 0 means one batch holds
        the entire dataset
    shuffle : bool, if we shuffle the data (ignored for ``subFactory``
        datasets, which always use a subset random sampler)
    """
    if batch_size <= 0:
        batch_size = len(dtset)
    self.dtset = dtset
    if isinstance(dtset, subFactory):
        # Sample only from the index range owned by this factory slice.
        indices = range(dtset.startind, dtset.finalind)
        DataLoader.__init__(self, dtset, batch_size, shuffle=False,
                            sampler=SubsetRandomSampler(indices))
    else:
        DataLoader.__init__(self, dtset, batch_size, shuffle=shuffle)
    # Mirror normalization statistics / field names when the dataset has them.
    if hasattr(dtset, 'xmean'):
        self.xmean, self.xstd = dtset.xmean, dtset.xstd
    if hasattr(dtset, 'ymean'):
        self.ymean, self.ystd = dtset.ymean, dtset.ystd
    if hasattr(dtset, '_xname'):
        self._xname = dtset._xname
    if hasattr(dtset, '_yname'):
        self._yname = dtset._yname
def __init__(
    self,
    dataset: ConcatDataset,
    batch_size=1,
    shuffle=False,
    drop_last=False,
    device=None,
    spawn=False,
    pass_pid=False,
    **otherconf
):
    """Device-aware loader over a ``ConcatDataset``.

    Raises ``NotImplementedError`` when ``spawn`` is requested; otherwise
    runs single-process (``num_workers=0``) with ``fixCollate`` batching.
    """
    if spawn:
        raise NotImplementedError
    DeviceAwareness.__init__(self, device)
    # Shuffling is done per-subset; otherwise iterate sequentially.
    if shuffle:
        sampler = ChainSubsetRandomSampler(dataset)
    else:
        sampler = SequentialSampler(dataset)
    workers = mp.cpu_count() if spawn else 0
    DataLoader.__init__(
        self,
        dataset,
        batch_size,
        **otherconf,
        pin_memory=False,
        drop_last=drop_last,
        collate_fn=lambda batch: fixCollate(batch, pass_pid),
        sampler=sampler,
        num_workers=workers,
    )
def __init__(
    self,
    dataset: ConcatDataset,
    distrib_title: str,
    batchsize_k=1,
    shuffle=False,
    device=None,
    spawn=False,
    **otherconf
):
    """Device-aware loader whose batching is fully delegated to a
    ``DistributedSampler`` built from ``distrib_title`` / ``batchsize_k``.

    Raises
    ------
    NotImplementedError
        If ``spawn`` is requested (not supported yet).
    ValueError
        If ``drop_last`` is passed: batch composition is controlled entirely
        by the batch sampler, so ``drop_last`` would be ignored or conflict.
    """
    if spawn:
        raise NotImplementedError
    # Was a bare ``assert``; asserts are stripped under ``python -O``, so
    # validate explicitly and fail loudly with a clear message.
    if "drop_last" in otherconf:
        raise ValueError(
            "'drop_last' is not supported: batches are fully determined "
            "by the batch_sampler"
        )
    DeviceAwareness.__init__(self, device)
    DataLoader.__init__(
        self,
        dataset,
        **otherconf,
        pin_memory=False,
        collate_fn=fixCollate,
        num_workers=mp.cpu_count() if spawn else 0,
        batch_sampler=DistributedSampler(
            dataset, distrib_title, batchsize_k, device, shuffle
        ),
    )
def __init__(self, dataset, batch_size=1, num_workers=0):
    """Shuffling loader that collates samples into tensors and drops the
    final incomplete batch."""
    config = dict(
        batch_size=batch_size,
        shuffle=True,
        collate_fn=collate_as_tensor,
        num_workers=num_workers,
        drop_last=True,
    )
    DataLoader.__init__(self, dataset, **config)
def __init__(self, *args, **kwargs):
    """Delegate to DataLoader, unconditionally overriding ``collate_fn``
    with ``collate_by_input_length`` (any caller-supplied value is replaced)."""
    overridden = dict(kwargs, collate_fn=collate_by_input_length)
    DataLoader.__init__(self, *args, **overridden)
def __init__(self, dataset, batch_size=4, shuffle=True):
    """Delegate to DataLoader using the module-level ``collate_fn``."""
    DataLoader.__init__(
        self,
        dataset,
        batch_size=batch_size,
        shuffle=shuffle,
        collate_fn=collate_fn,
    )
def __init__(self, n_class, n_shot, n_query, *args, **kwargs):
    """Record episode geometry, then defer everything else to DataLoader.

    NOTE(review): the names suggest few-shot episodes (``n_class``-way,
    ``n_shot`` support + ``n_query`` query); these attributes are only
    stored here, presumably consumed by a sampler elsewhere — confirm.
    """
    self.n_class, self.n_shot, self.n_query = n_class, n_shot, n_query
    DataLoader.__init__(self, *args, **kwargs)