Example #1
def __init__(
    self,
    urls,
    *,
    length=None,
    open_fn=gopen.reader,
    handler=reraise_exception,
    tarhandler=None,
    prepare_for_worker=True,
    initial_pipeline=None,
    shard_selection=worker_urls,
):
    # Errors inside the tar reader fall back to the general handler
    # unless a dedicated tarhandler is given.
    tarhandler = handler if tarhandler is None else tarhandler
    IterableDataset.__init__(self)
    SampleIterator.__init__(
        self,
        initial_pipeline=initial_pipeline,
        tarhandler=tarhandler,
        open_fn=open_fn,
    )
    # A brace pattern such as "shards-{0000..0999}.tar" is expanded into
    # an explicit list of shard URLs.
    if isinstance(urls, str):
        urls = list(braceexpand.braceexpand(urls))
    self.urls = urls
    self.length = length
    self.handler = handler
    self.total = 0
    # Hooks for reseeding and for node/shard selection and shuffling;
    # the defaults are no-ops.
    self.reseed_hook = do_nothing
    self.node_selection = identity
    self.shard_selection = shard_selection
    self.shard_shuffle = identity
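
The isinstance(urls, str) branch above relies on the braceexpand package to turn a shard pattern into an explicit URL list. A minimal, self-contained sketch of just that step; the pattern string is made up for illustration:

import braceexpand

# "shards-{0000..0003}.tar" is a hypothetical pattern; braceexpand turns it
# into an explicit list of shard names.
urls = list(braceexpand.braceexpand("shards-{0000..0003}.tar"))
print(urls)
# ['shards-0000.tar', 'shards-0001.tar', 'shards-0002.tar', 'shards-0003.tar']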
Example #2
def __init__(
    self, dataset=None, workers=4, output_size=100, pin_memory=True, prefetch=-1
):
    IterableDataset.__init__(self)
    omp_warning()
    # Bounded queue that the worker processes feed and the iterator drains.
    self.output_queue = mp.Queue(output_size)
    self.pin_memory = pin_memory
    self.jobs = []
    # Start one daemon process per worker; each runs _parallel_job on its
    # share of the dataset and pushes results into the output queue.
    for i in range(workers):
        job = mp.Process(
            target=_parallel_job,
            args=(dataset, i, workers, prefetch, self.output_queue),
            daemon=True,
        )
        self.jobs.append(job)
        job.start()
    D("started")
Example #3
def __init__(self):
    IterableDataset.__init__(self)
    # pipeline_results comes from the enclosing scope; it is not a
    # constructor argument.
    self.images_and_density_maps = pipeline_results
    # Convert each image to a PyTorch tensor when samples are produced.
    self.image_transform = torch_transforms.Compose([
        torch_transforms.ToTensor()
    ])
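
A minimal sketch of what the transform above does: torchvision's ToTensor converts an H x W x C uint8 image into a C x H x W float tensor scaled to [0, 1]. The dummy numpy image is only for illustration, and torchvision is assumed to be installed and imported as torch_transforms, as in the example.

import numpy as np
import torchvision.transforms as torch_transforms

transform = torch_transforms.Compose([torch_transforms.ToTensor()])
image = np.zeros((32, 32, 3), dtype=np.uint8)  # dummy H x W x C image
tensor = transform(image)
print(tensor.shape, tensor.dtype)  # torch.Size([3, 32, 32]) torch.float32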