def transform_stream(self, stream):
    """Yield only the stream objects for which ``self.function`` holds."""
    with closing_if_closable(stream):
        for item in stream:
            if self.function(item):
                yield item
def _fill_queue(self, stream: Stream):
    """Drain *stream* into ``self.queue``, always terminating with the sentinel.

    The sentinel is enqueued in ``finally`` so consumers see end-of-stream
    even when the producer raises.
    """
    try:
        with closing_if_closable(stream):
            for item in stream:
                self.queue.put(item)
    finally:
        # Signal end-of-stream unconditionally.
        self.queue.put(self._sentinel)
def transform_stream(self, stream: Stream) -> Stream:
    """Transform a stream.

    Maintains a running median estimate of the "value" input using the
    Mcfarlane & Schofield update rule: the estimate is incremented where
    the incoming value lies above it and decremented where it lies below,
    so it drifts towards the median over time.
    """
    with closing_if_closable(stream):
        # Initial approximation: consume leading objects to seed self.median.
        # TODO: This does not work correctly in situations when transform_stream
        # is called repeatedly, e.g. ParallelPipeline.
        if self.median is None:
            yield from self._initialize_median(stream)

        # Process the remaining objects, updating the estimate per object.
        for obj in stream:
            value = self.prepare_input(obj, "value")

            # Update according to Mcfarlane & Schofield:
            # increment the estimate wherever the value exceeds it ...
            mask = value > self.median
            if np.isscalar(mask):
                # Scalar case: a bool adds 1 (True) or 0 (False).
                self.median += mask
            else:
                # Array case: in-place increment of the masked entries.
                self.median[mask] += 1

            # ... and decrement it wherever the value falls below it.
            mask = value < self.median
            if np.isscalar(mask):
                self.median -= mask
            else:
                self.median[mask] -= 1

            yield self.prepare_output(obj, self.median)

    self.after_stream()
def transform_stream(self, stream: Stream):
    """Debugging pass-through: print each object's id and selected outputs.

    Objects are yielded unchanged after printing.
    """
    with closing_if_closable(stream):
        for stream_obj in stream:
            # Identify the object by its memory address.
            print("Stream object at 0x{:x}".format(id(stream_obj)))
            # Pretty-print every configured output next to its name.
            for variable in self.args:
                print("{}: ".format(variable.name), end="")
                pprint.pprint(stream_obj[variable])
            yield stream_obj
def transform_stream(self, stream):
    """Fan out each incoming object into one object per element of its iterable."""
    with closing_if_closable(stream):
        for source_obj in stream:
            # Each element gets its own copy of the originating object.
            for element in self.prepare_input(source_obj, "iterable"):
                yield self.prepare_output(source_obj.copy(), element)
def transform_stream(self, stream: Stream):
    """Filter the stream by ``self.predicate``, keeping remaining-count hints consistent."""
    with closing_if_closable(stream):
        estimator = StreamEstimator()
        for obj in stream:
            with estimator.incoming_object(obj.n_remaining_hint):
                if self.predicate(obj):
                    # Re-estimate how many objects are still to come.
                    obj.n_remaining_hint = estimator.emit()
                    yield obj
def transform_stream(self, stream):
    """Wrap the stream in a tqdm progress bar; objects pass through unchanged."""
    with closing_if_closable(stream), self._tqdm.tqdm(stream) as progress:
        if self.monitor_interval is not None:
            # Override tqdm's monitoring thread interval.
            progress.monitor_interval = self.monitor_interval
        for obj in progress:
            label = self.prepare_input(obj, "description")
            if label:
                progress.set_description(label)
            yield obj
def transform_stream(self, stream: Stream):
    """Fan out each incoming object into one object per element of its collection.

    The collection is materialized so its length can inform the
    remaining-count estimate attached to every emitted object.
    """
    with closing_if_closable(stream):
        estimator = StreamEstimator()
        for source_obj in stream:
            elements = tuple(self.prepare_input(source_obj, "collection"))
            with estimator.incoming_object(
                source_obj.n_remaining_hint, len(elements)
            ):
                for element in elements:
                    yield self.prepare_output(
                        source_obj.copy(),
                        element,
                        n_remaining_hint=estimator.emit(),
                    )
def transform_stream(self, stream: Stream) -> Stream:
    """Capture frames from the Raspberry Pi camera for incoming objects.

    NOTE(review): ``capture_continuous`` is an endless generator, so for
    each incoming object this keeps yielding ``obj`` once per captured
    frame and never advances to the next object — presumably the
    upstream supplies a single seed object; confirm against callers.
    """
    with picamera_camera.PiCamera(**self.kwargs) as cam:
        # raw_resolution adjusts the configured resolution to what the
        # camera actually delivers (see picamera docs).
        resolution = picamera_array.raw_resolution(cam.resolution)
        with closing_if_closable(stream):
            for obj in stream:
                # Reusable RGB frame buffer; resolution is (w, h), the
                # array wants (h, w, 3).
                output = np.empty(resolution[::-1] + (3, ), dtype=np.uint8)

                # Capture continously
                for _ in cam.capture_continuous(output, format="rgb"):
                    # Return value unused — presumably prepare_output
                    # stores the frame on obj in place; a copy is taken
                    # so the next capture does not overwrite it.
                    self.prepare_output(obj, output.copy())
                    yield obj

    self.after_stream()
def transform_stream(self, stream: Stream):
    """Show a tqdm progress bar while passing stream objects through unchanged.

    The bar total is refreshed from each object's ``n_remaining_hint``
    when available.
    """
    with closing_if_closable(stream), tqdm.tqdm(stream) as progress:
        if self.monitor_interval is not None:
            # Override tqdm's monitoring thread interval.
            progress.monitor_interval = self.monitor_interval

        n_processed = 0
        for obj in progress:
            label = self.prepare_input(obj, "description")
            if label:
                progress.set_description(label)

            remaining = obj.n_remaining_hint
            if remaining is not None:
                # Processed so far plus the estimated remainder.
                progress.total = n_processed + remaining

            yield obj
            n_processed += 1
def transform_stream(self, stream: Stream) -> Stream:
    """Apply the windowed filter to the "value" input of every stream object.

    Objects are buffered in ``obj_queue`` alongside filter responses in
    ``response_queue``; with ``self.centered``, responses are shifted by
    half a window so each object is paired with the response of the
    window centered on it.
    """
    stream = check_stream(stream)

    # Objects awaiting emission and their corresponding filter responses.
    obj_queue = deque()
    response_queue = deque()

    with closing_if_closable(stream):
        # Lead-in: Initialize filter by filling a full window before
        # emitting anything.
        # NOTE(review): a stream shorter than self.size exhausts next()
        # here; PEP 479 turns that StopIteration into a RuntimeError —
        # confirm this is the intended failure mode.
        for _ in range(self.size):
            obj = next(stream)
            obj_queue.append(obj)
            value = self.prepare_input(obj, "value")
            response = self._update(value)
            response_queue.append(response)

        if self.centered:
            # Discard the first half-window of responses to realign
            # responses with the objects at the window centers.
            for _ in range(self.size // 2):
                response_queue.popleft()

        # Normal operation: one object in, one (older) object out.
        for obj in stream:
            obj_queue.append(obj)
            value = self.prepare_input(obj, "value")
            response_queue.append(self._update(value))

            obj = obj_queue.popleft()
            response = response_queue.popleft()
            yield self.prepare_output(obj, response)

        # Lead-out: Yield rest of the queue, invalidating old filter
        # responses by feeding None while the window drains.
        while obj_queue:
            obj = obj_queue.popleft()
            response = response_queue.popleft()
            yield self.prepare_output(obj, response)
            response_queue.append(self._update(None))

        # Every buffered object must have been emitted.
        assert not obj_queue
def _queue_filler():
    """Feed upstream objects to the worker input queues (producer side).

    Closure over ``stream``, ``input_queues``, ``stop_event`` and
    ``upstream_exception`` from the enclosing scope (not visible here).
    """
    try:
        with closing_if_closable(stream):
            for i, obj in enumerate(stream):
                # Send objects to workers in a round-robin fashion
                worker_idx = i % self.num_workers
                if not _put_until_stop(
                    input_queues[worker_idx], stop_event, obj
                ):
                    # A stop was requested while waiting for queue space.
                    return

            # Tell all workers to stop working
            for iqu in input_queues:
                if not _put_until_stop(iqu, stop_event, _Signal.END):
                    return
    except Exception as exc:
        # Stop everything immediately and hand the exception to the
        # consuming side for re-raising.
        stop_event.set()
        # NOTE(review): leftover debug print — consider a logger instead.
        print("ParallelPipeline._queue_filler", exc)
        upstream_exception.append(ExceptionWrapper(exc))
def transform_stream(self, stream: Stream):
    """Yield a new StreamObject per input, retaining only the configured keys."""
    with closing_if_closable(stream):
        for obj in stream:
            retained = {}
            for key, value in obj.items():
                if key in self.keys:
                    retained[key] = value
            yield StreamObject(retained)
def transform_stream(self, stream: Stream):
    """Attach a running index (starting at ``self.start``) to every object."""
    with closing_if_closable(stream):
        index = self.start
        for obj in stream:
            yield self.prepare_output(obj, index)
            index += 1
def transform_stream(self, stream: Stream):
    """Pass through only the slice of the stream selected by ``self.args``.

    ``self.args`` follows the ``itertools.islice`` convention
    (stop | start, stop[, step]).
    """
    with closing_if_closable(stream):
        yield from itertools.islice(stream, *self.args)