def spellcheck(packets, language, both, threads):
    """Spell check a t42 stream."""
    try:
        from teletext.spellcheck import spellcheck_packets
    except ModuleNotFoundError as e:
        # Only a missing enchant backend is translated into a friendly CLI
        # error; any other missing module is a real bug and propagates as-is.
        if e.name == 'enchant':
            raise click.UsageError(
                f'{e.msg}. PyEnchant is not installed. Spelling checker is not available.'
            )
        raise

    if not both:
        yield from itermap(spellcheck_packets, packets, threads, language=language)
        return

    # Duplicate the stream so each original packet can be emitted alongside
    # its spell-checked counterpart.
    originals, to_check = itertools.tee(packets, 2)
    checked = itermap(spellcheck_packets, to_check, threads, language=language)
    try:
        while True:
            yield next(originals)
            yield next(checked)
    except StopIteration:
        # Either stream ran out — the interleaved output simply ends here.
        pass
def deconvolve(chunker, mags, rows, pages, subpages, paginate, config, mode, force_cpu, threads, keep_empty, progress, mag_hist, row_hist, err_hist, rejects, tape_format):
    """Deconvolve raw VBI samples into Teletext packets.

    Builds a lazy pipeline: raw chunks -> process_lines workers -> optional
    statistics wrappers -> optional pagination, yielding packets (or pages
    of packets when ``paginate`` is set).
    """
    # Empty packets carry no page address, so they cannot be paginated.
    if keep_empty and paginate:
        raise click.UsageError("Can't keep empty packets when paginating.")
    from teletext.vbi.line import process_lines
    if force_cpu:
        sys.stderr.write('CUDA disabled by user request.\n')
    # Each chunk is one raw VBI line: line_length samples of config.dtype.
    chunks = chunker(config.line_length * np.dtype(config.dtype).itemsize, config.field_lines, config.field_range)
    if progress:
        chunks = tqdm(chunks, unit='L', dynamic_ncols=True)
        if any((mag_hist, row_hist, rejects)):
            # Shared container for the live statistics appended below;
            # tqdm renders it as its postfix.
            chunks.postfix = StatsList()
    packets = itermap(process_lines, chunks, threads, mode=mode, config=config, force_cpu=force_cpu, mags=mags, rows=rows, tape_format=tape_format)
    if progress and rejects:
        packets = Rejects(packets)
        chunks.postfix.append(packets)
    if keep_empty:
        # Substitute blank packets for non-packet results so the output
        # stream keeps one entry per input line.
        packets = (p if isinstance(p, Packet) else Packet() for p in packets)
    else:
        # Drop anything the workers could not decode into a Packet.
        packets = (p for p in packets if isinstance(p, Packet))
    if progress and mag_hist:
        packets = MagHistogram(packets)
        chunks.postfix.append(packets)
    if progress and row_hist:
        packets = RowHistogram(packets)
        chunks.postfix.append(packets)
    if progress and err_hist:
        packets = ErrorHistogram(packets)
        chunks.postfix.append(packets)
    if paginate:
        # Group packets into pages, then flatten each page back to packets.
        for p in pipeline.paginate(packets, pages=pages, subpages=subpages):
            yield from p
    else:
        yield from packets
def split(chunker, outdir, config, threads, progress, rejects):
    """Split training recording into intermediate bins.

    Processes raw chunks through ``process_training`` workers and routes
    the resulting tuples into 256 per-pattern files under ``outdir``.
    """
    import contextlib
    from teletext.vbi.training import process_training, split
    # Each chunk is one raw VBI line: line_length samples of config.dtype.
    chunks = chunker(config.line_length * np.dtype(config.dtype).itemsize, config.field_lines, config.field_range)
    if progress:
        chunks = tqdm(chunks, unit='L', dynamic_ncols=True)
    results = itermap(process_training, chunks, threads, config=config)
    if progress and rejects:
        results = Rejects(results)
        chunks.postfix = StatsList()
        chunks.postfix.append(results)
    # Workers yield tuples for usable lines; anything else is a reject.
    results = (r for r in results if isinstance(r, tuple))
    # Open all 256 bins up front and guarantee they are closed even if
    # processing raises part-way through (the original leaked the handles).
    with contextlib.ExitStack() as stack:
        files = [
            stack.enter_context(open(os.path.join(outdir, f'training.{n:02x}.dat'), 'wb'))
            for n in range(256)
        ]
        split(results, files)
def _crashing_iter(self, n):
    """Feed crashy n good items then one poison item; expect ValueError."""
    items = [False] * n + [True]
    with self.assertRaises(ValueError):
        list(itermap(crashy, items, processes=self.procs))
def test_called_once_single(self):
    """callcount should report 1 for every one of procs+1 items."""
    count = self.procs + 1
    result = list(itermap(callcount, [None] * count, processes=self.procs))
    self.assertListEqual(result, [1] * count)
def test_single(self):
    """itermap(multiply, ..., 3) must match a direct multiply() call."""
    # Renamed from `input`, which shadowed the builtin of the same name.
    values = list(range(100))
    expected = list(multiply(values, 3))
    result = list(itermap(multiply, values, self.procs, 3))
    self.assertListEqual(result, expected)
def test_too_many_args(self):
    """An extra positional argument is surfaced as ChildProcessError."""
    items = [False] * 3
    with self.assertRaises(ChildProcessError):
        list(itermap(multiply, items, self.procs, 3, 4))
def test_not_generator(self):
    """A non-generator worker function is surfaced as ChildProcessError."""
    items = [False] * 3
    with self.assertRaises(ChildProcessError):
        list(itermap(not_generator, items, processes=self.procs))
def test_unpickleable_item_in_iter(self):
    """A lambda among the input items cannot be pickled for the workers."""
    items = [None] * 10 + [lambda x: x]
    with self.assertRaises(AttributeError):
        list(itermap(null, items, self.procs, None))
def test_unpickleable_function(self):
    """A lambda worker function cannot be pickled; expect AttributeError."""
    items = [False] * 3
    with self.assertRaises(AttributeError):
        list(itermap(lambda x: x, items, self.procs))
def test_early_crash(self):
    """A worker that crashes immediately is surfaced as ChildProcessError."""
    items = [False] * 3
    with self.assertRaises(ChildProcessError):
        list(itermap(early_crash, items, self.procs))
def _crashing_iter(self, n):
    """Feed crashy n good items then one poison item; expect ChildProcessError."""
    items = [False] * n + [True]
    with self.assertRaises(ChildProcessError):
        list(itermap(crashy, items, self.procs))
def test_early_crash(self):
    """In this mode the worker's ValueError propagates to the caller."""
    items = [False] * 3
    with self.assertRaises(ValueError):
        list(itermap(early_crash, items, processes=self.procs))
def test_empty_iter(self):
    """An empty input iterable produces an empty result list."""
    result = list(itermap(callcount, [], processes=self.procs))
    self.assertListEqual(result, [])