def test_valid_output(self):
    """Render TESTFILE through the LatexFormatter and compile the result.

    Skips the test when no ``latex`` binary is available; otherwise fails
    if the latex run reports a non-zero exit status.
    """
    with open(TESTFILE) as fp:
        tokensource = list(PythonLexer().get_tokens(fp.read()))
    fmt = LatexFormatter(full=True, encoding='latin1')

    handle, pathname = tempfile.mkstemp('.tex')
    # place all output files in /tmp too
    old_wd = os.getcwd()
    os.chdir(os.path.dirname(pathname))
    try:
        # write the formatted document to the temp file
        tfile = os.fdopen(handle, 'wb')
        try:
            fmt.format(tokensource, tfile)
        finally:
            tfile.close()
        try:
            import subprocess
            po = subprocess.Popen(
                ['latex', '-interaction=nonstopmode', pathname],
                stdout=subprocess.PIPE)
            ret = po.wait()
            output = po.stdout.read()
            po.stdout.close()
        except OSError as e:
            # latex not available
            raise support.SkipTest(e)
        else:
            if ret:
                # show latex's output only when something went wrong
                print(output)
            self.assertFalse(ret, 'latex run reported errors')
    finally:
        # BUGFIX: previously only executed on success, leaking the temp
        # file and leaving the cwd pointed at the temp directory for any
        # later test when latex failed or SkipTest was raised.
        os.unlink(pathname)
        os.chdir(old_wd)
def check_lexer(lx, fn):
    """Lex examplefiles/*fn* with lexer *lx* and verify a lossless round trip.

    Asserts that no Error token is produced and that concatenating all token
    values reproduces the (newline-normalized) input text.  Records timing in
    the module-level STATS dict and, when STORE_OUTPUT is set, stores or
    compares the token stream against a pickled reference in the output dir.
    Raises SkipTest for files known to break on Jython.
    """
    if os.name == 'java' and fn in BAD_FILES_FOR_JYTHON:
        raise support.SkipTest('%s is a known bad file on Jython' % fn)
    absfn = os.path.join(TESTDIR, 'examplefiles', fn)
    with open(absfn, 'rb') as fp:
        text = fp.read()
    # normalize line endings and guarantee exactly one trailing newline
    text = text.replace(b'\r\n', b'\n')
    text = text.strip(b'\n') + b'\n'
    try:
        text = text.decode('utf-8')
        # drop a UTF-8 BOM if present
        if text.startswith(u'\ufeff'):
            text = text[len(u'\ufeff'):]
    except UnicodeError:
        text = text.decode('latin1')
    ntext = []
    tokens = []
    import time
    t1 = time.time()
    # FIX: renamed loop variable 'type' -> 'tok_type' to stop shadowing
    # the builtin type().
    for tok_type, val in lx.get_tokens(text):
        ntext.append(val)
        assert tok_type != Error, \
            'lexer %s generated error token for %s: %r at position %d' % \
            (lx, absfn, val, len(u''.join(ntext)))
        tokens.append((tok_type, val))
    t2 = time.time()
    # (total chars, total ms, ms per char)
    STATS[os.path.basename(absfn)] = (len(text),
                                      1000 * (t2 - t1),
                                      1000 * (t2 - t1) / len(text))
    if u''.join(ntext) != text:
        print('\n'.join(
            difflib.unified_diff(u''.join(ntext).splitlines(),
                                 text.splitlines())))
        raise AssertionError('round trip failed for ' + absfn)

    # check output against previous run if enabled
    if STORE_OUTPUT:
        # no previous output -- store it
        outfn = os.path.join(TESTDIR, 'examplefiles', 'output', fn)
        if not os.path.isfile(outfn):
            with open(outfn, 'wb') as fp:
                pickle.dump(tokens, fp)
            return
        # otherwise load it and compare
        with open(outfn, 'rb') as fp:
            stored_tokens = pickle.load(fp)
        if stored_tokens != tokens:
            f1 = pprint.pformat(stored_tokens)
            f2 = pprint.pformat(tokens)
            print('\n'.join(
                difflib.unified_diff(f1.splitlines(),
                                     f2.splitlines())))
            assert False, absfn
def verify(formatter):
    """Check that *formatter* produces output of the expected type.

    Unicode-capable formatters must yield text when created with
    ``encoding=None`` and bytes when created with ``encoding='utf-8'``;
    the raw-token formatter always yields bytes.  Skips when an optional
    dependency or font is unavailable.
    """
    try:
        inst = formatter(encoding=None)
    except (ImportError, FontNotFound) as e:
        # some dependency or font not installed
        raise support.SkipTest(e)

    if formatter.name == 'Raw tokens':
        # the raw-token formatter ignores encoding and emits bytes
        inst = formatter()
        out = format(tokens, inst)
        assert type(out) is bytes, '%s: %r' % (formatter, out)
    else:
        out = format(tokens, inst)
        if formatter.unicodeoutput:
            assert type(out) is text_type, '%s: %r' % (formatter, out)

        # with an explicit encoding the result must be a byte string.
        # Cannot test for encoding, since formatters may have to escape
        # non-ASCII characters.
        inst = formatter(encoding='utf-8')
        out = format(tokens, inst)
        assert type(out) is bytes, '%s: %r' % (formatter, out)
def verify(formatter):
    """Sanity-check *formatter*'s registry metadata and basic operation.

    Verifies the FORMATTERS registry entry is complete, that the formatter
    tolerates arbitrary keyword options, that get_style_defs() either works
    or raises NotImplementedError, and that format() runs against the
    appropriate (text or bytes) output stream.
    """
    info = formatters.FORMATTERS[formatter.__name__]
    assert len(info) == 5
    assert info[1], "missing formatter name"
    assert info[2], "missing formatter aliases"
    assert info[4], "missing formatter docstring"

    # formatters must accept (and ignore) unknown options
    try:
        inst = formatter(opt1="val1")
    except (ImportError, FontNotFound) as e:
        raise support.SkipTest(e)

    try:
        inst.get_style_defs()
    except NotImplementedError:
        # may be raised by formatters for which it doesn't make sense
        pass

    # pick the output stream matching the formatter's declared output type
    target = string_out if formatter.unicodeoutput else bytes_out
    inst.format(ts, target)