def _flush_body(self):
    """
    Discard self.body but consume any generator such that any
    finalization can occur, such as is required by
    caching.tee_output().
    """
    consume(iter(self.body))

def wait_for_power_status_change():
    # Win32_PowerManagementEvent EventType 10 is "Power Status Change"
    EVT_POWER_STATUS_CHANGE = 10

    def not_power_status_change(evt):
        return evt.EventType != EVT_POWER_STATUS_CHANGE

    events = get_power_management_events()
    # drain events until the first power-status change arrives
    consume(itertools.takewhile(not_power_status_change, events))

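# A toy run of that blocking pattern (hypothetical integer "events",
# with 10 playing the target role). Note that takewhile reads and
# discards the first failing element, so the stream resumes just past
# the target:
import itertools
events = iter([1, 4, 7, 10, 11])
consume(itertools.takewhile(lambda e: e != 10, events))
print(next(events))  # -> 11
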
def run(self):
    files = self.gfs.list()
    bar = progress.TargetProgressBar(len(files))
    processed_files = map(self.process, bar.iterate(files))
    # process() apparently signals failure with a truthy return value
    errors = filter(None, processed_files)
    counter = Counter(errors)
    # iterating the Counter yields each distinct error once
    consume(map(self.handle_trap, counter))
    return counter

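# The tally step in miniature (hypothetical error values): falsy
# results are dropped, and Counter groups what remains:
from collections import Counter
Counter(filter(None, [None, 'timeout', None, 'timeout', 'refused']))
# -> Counter({'timeout': 2, 'refused': 1})
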
def run(self):
    files = self.source_coll.files.find(self.filter, batch_size=1)
    limit_files = itertools.islice(files, self.limit)
    # cap the progress total at the requested limit, if any
    count = min(files.count(), self.limit or float('inf'))
    bar = progress.TargetProgressBar(count)
    to_process = map(self.process, bar.iterate(limit_files))
    consume(to_process)

def save(self, words):
    """
    Save these words as encountered.
    """
    # TODO: Need to cap the network, expire old words/phrases
    # prepend a None sentinel so the first word is paired with None
    initial = (None,)
    all_words = itertools.chain(initial, words)
    consume(itertools.starmap(self.update, pairwise(all_words)))

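# What that plumbing produces, using itertools.pairwise (Python 3.10+;
# the classic pairwise recipe behaves the same), with 'abc' standing in
# for the word stream:
import itertools
list(itertools.pairwise(itertools.chain((None,), 'abc')))
# -> [(None, 'a'), ('a', 'b'), ('b', 'c')], so update(prev, word)
# sees prev=None for the first word.
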
def test_lines_completes(gzip_stream):
    """
    When reading lines from a gzip stream, the operation should
    complete when the stream is exhausted.
    """
    chunks = gzip.read_chunks(gzip_stream)
    streams = gzip.load_streams(chunks)
    lines = flatten(map(gzip.lines_from_stream, streams))
    consume(lines)

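# flatten here is assumed to be more_itertools.flatten, i.e. one level
# of chaining -- equivalent to this sketch:
import itertools

def flatten(list_of_iterables):
    """Merge one level of nesting into a single iterator."""
    return itertools.chain.from_iterable(list_of_iterables)
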
def run(self, bar=progress.TargetProgressBar):
    files = self.source_coll.files.find(
        self.filter,
        batch_size=1,
    )
    limit_files = itertools.islice(files, self.limit)
    count = min(files.count(), self.limit or float('inf'))
    # fall back to a plain iterator when no progress bar is wanted
    progress = bar(count).iterate if bar else iter
    with SignalTrap(progress(limit_files)) as items:
        consume(map(self.process, items))

def run(
    # the compose(...) annotations appear to be converters (e.g.
    # autocommand-style) turning CLI paths into reader/writer objects
    input: compose(csv.DictReader, open) = csv.DictReader(sys.stdin),
    output: compose(DictWriter, write) = DictWriter(sys.stdout),
    skip: int = 3,
):
    """
    Resolve sales from transactions using LIFO strategy.
    """
    # pass through the first `skip` rows untouched
    output.writer.writerows(itertools.islice(input.reader, skip))
    output.fieldnames = ['Lot'] + input.fieldnames + [Lots.basis_field]
    output.writeheader()
    consume(map(output.writerow, Lots(input)))

def process_forever(self, timeout=0.2):
    """Run an infinite loop, processing data from connections.

    This method repeatedly calls process_once.

    Arguments:

        timeout -- Parameter to pass to process_once.
    """
    # This loop should specifically *not* be mutex-locked.
    # Otherwise no other thread would ever be able to change
    # the shared state of a Reactor object running this function.
    log.debug("process_forever(timeout=%s)", timeout)
    one = functools.partial(self.process_once, timeout=timeout)
    consume(infinite_call(one))

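# infinite_call is assumed to yield f() results without end, roughly:
import itertools

def infinite_call(f):
    """Call f repeatedly, yielding each result, forever."""
    return itertools.starmap(f, itertools.repeat(()))

# consume() then drives the loop purely for process_once's side effects.
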
def init_environment():
    if platform.system() != 'Darwin':
        return
    lib = path('/Applications/MakeMKV.app/Contents/lib/libmmbd.dylib')
    if not lib.isfile():
        print("Need to install MakeMKV", file=sys.stderr)
        raise SystemExit(1)
    root = path('~/lib').expanduser()
    root.makedirs_p()
    link_names = 'libaacs.dylib', 'libbdplus.dylib'
    # create any missing symlinks, purely for the side effect
    consume(
        lib.symlink(link)
        for link in map(root.joinpath, link_names)
        if not link.exists()
    )
    try:
        ctypes.CDLL('/usr/local/lib/libdvdcss.2.dylib')
    except Exception:
        msg = "Need libdvdcss (brew install libdvdcss)"
        print(msg, file=sys.stderr)
        raise SystemExit(1)

def portscan_hosts(hosts, *args, **kargs):
    consume(map(lambda h: portscan(h, *args, **kargs), hosts))

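# The same behavior without the lambda -- a sketch of the equivalent
# generator-expression spelling:
def portscan_hosts(hosts, *args, **kargs):
    consume(portscan(h, *args, **kargs) for h in hosts)
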
def set_root(self, root):
    consume(f.set_root(root) for f in self.filters)

def consume(self, n: Optional[int] = None) -> "ChainedIterable[_T]":
    # delegate to the module-level consume (more_itertools signature)
    consume(self._iterable, n=n)
    return self

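# For reference: the consume() all of these call is, in essence, the
# itertools documentation's recipe (more_itertools.consume shares the
# signature):
import collections
import itertools

def consume(iterator, n=None):
    """Advance *iterator* n steps ahead; if n is None, drain it."""
    if n is None:
        # feeding a zero-length deque exhausts the iterator at C speed
        collections.deque(iterator, maxlen=0)
    else:
        next(itertools.islice(iterator, n, n), None)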