def test_against_files(self):
    """Compare calls recorded from ``golden_script.main`` against the
    stored golden JSON file for the running Python version.

    Flip the ``if 1:`` switch to 0 to regenerate the golden file
    instead of asserting against it.
    """
    @register_repr(weakref.ref)
    def repr_weakref(*_):
        # Weakref reprs embed memory addresses; use a stable placeholder.
        return '<weakref>'

    ids = get_call_ids(golden_script.main)
    calls = [session.query(Call).filter_by(id=c_id).one() for c_id in ids]

    def normalise_addresses(string):
        # Object reprs like "at 0x7f3a...>" vary per run; pin them so the
        # recorded data is byte-for-byte reproducible.
        return re.sub(r'at 0x\w+>', 'at 0xABC>', string)

    data = [
        dict(
            arguments=byteify(
                json.loads(normalise_addresses(call.arguments))),
            return_value=byteify(normalise_addresses(call.return_value)),
            exception=call.exception,
            traceback=call.traceback,
            data=normalise_call_data(normalise_addresses(call.data)),
        )
        for call in calls
    ]

    # Fix: the old r'\d\.\d' regex against sys.version truncated
    # two-digit minors ("3.10" -> "3.1"), selecting the wrong golden dir.
    version = '%s.%s' % sys.version_info[:2]
    path = os.path.join(os.path.dirname(__file__),
                        'golden-files', version, 'calls.json')

    if 1:  # change to 0 to write new data instead of reading and testing
        self.assertEqual(data, byteify(file_to_json(path)))
    else:
        json_to_file(data, path)
def test_against_files(self):
    """Compare ``golden_calls`` (including per-call function metadata)
    against the stored golden JSON file for the running Python version.

    Flip the ``if 1:`` switch to 0 to regenerate the golden file
    instead of asserting against it.
    """
    @register_repr(weakref.ref)
    def repr_weakref(*_):
        # Weakref reprs embed memory addresses; use a stable placeholder.
        return '<weakref>'

    def normalise_addresses(string):
        # Object reprs like "at 0x7f3a...>" vary per run; pin them so the
        # recorded data is byte-for-byte reproducible.
        return re.sub(r'at 0x\w+>', 'at 0xABC>', string)

    data = [
        dict(
            arguments=byteify(
                json.loads(normalise_addresses(call.arguments))),
            return_value=byteify(normalise_addresses(call.return_value)),
            exception=call.exception,
            traceback=call.traceback,
            data=normalise_call_data(normalise_addresses(call.data)),
            function=dict(
                name=byteify(call.function.name),
                html_body=byteify(call.function.html_body),
                lineno=call.function.lineno,
                data=byteify(json.loads(call.function.data)),
            ),
        )
        for call in golden_calls
    ]

    # Fix: the old r'\d\.\d' regex against sys.version truncated
    # two-digit minors ("3.10" -> "3.1"), selecting the wrong golden dir.
    version = '%s.%s' % sys.version_info[:2]
    path = os.path.join(os.path.dirname(__file__),
                        'golden-files', version, 'calls.json')

    if 1:  # change to 0 to write new data instead of reading and testing
        self.assertEqual(data, byteify(file_to_json(path)))
    else:
        json_to_file(data, path)
def main():
    """Extract every ``*.program`` translation from each locale's compiled
    ``futurecoder.mo`` catalogue and write them to ``manual_programs.json``.

    Output shape: {msgid: {language_code: [lines of translated program]}}.
    """
    this_dir = Path(__file__).parent
    locales = this_dir / "locales"
    result = defaultdict(dict)
    for lang_dir in locales.iterdir():
        # Robustness fix: iterdir() also yields stray files (.DS_Store,
        # README, ...) whose LC_MESSAGES path doesn't exist and would
        # crash polib.mofile — only descend into real locale directories.
        if not lang_dir.is_dir():
            continue
        mofile = polib.mofile(str(lang_dir / "LC_MESSAGES/futurecoder.mo"))
        for entry in mofile:
            if entry.msgid.endswith(".program"):
                # Store the translated program as a list of lines so the
                # JSON diff stays readable.
                result[entry.msgid][lang_dir.name] = entry.msgstr.splitlines()
    json_to_file(result, this_dir / "manual_programs.json", indent=4, sort_keys=True)
def main():
    """Generate the frontend's static data files and the python_core tarball.

    Writes chapters/pages/terms JSON, copies birdseye's static assets,
    verifies (or rewrites, with FIX_CORE_IMPORTS=1) core_imports.txt, then
    packs the core Python files plus translation data into a tar archive
    the frontend loads by URL.

    Raises:
        ValueError: if core_imports.txt disagrees with the computed roots
            and FIX_CORE_IMPORTS is not set.
    """
    print("Generating files...")
    t.set_language(os.environ.get("FUTURECODER_LANGUAGE", "en"))
    json_to_file(list(load_chapters()), frontend_src / "chapters.json")
    json_to_file(get_pages(), frontend_src / "book/pages.json.load_by_url")
    json_to_file(dict(frontend_terms()), frontend_src / "terms.json", indent=4)

    # Refresh birdseye's static assets wholesale.
    birdseye_dest = frontend / "public/birdseye"
    shutil.rmtree(birdseye_dest, ignore_errors=True)
    shutil.copytree(Path(birdseye.__file__).parent / "static", birdseye_dest,
                    dirs_exist_ok=True)

    roots = get_roots()
    core_imports = "\n".join(roots)
    core_imports_path = this_dir / "core_imports.txt"
    if os.environ.get("FIX_CORE_IMPORTS"):
        core_imports_path.write_text(core_imports)
    else:
        # Fix: read the file once instead of twice (the second read could
        # even differ from the one that was compared).
        existing_imports = core_imports_path.read_text()
        if existing_imports != core_imports:
            raise ValueError(
                f"core_imports.txt is out of date, run with FIX_CORE_IMPORTS=1.\n"
                f"{core_imports}\n!=\n{existing_imports}")

    with tarfile.open(frontend_src / "python_core.tar.load_by_url", "w") as tar:
        tar.add(this_dir, arcname=this_dir.stem,
                recursive=True, filter=tarfile_filter)

        # Bundle translation catalogues for non-English builds.
        if t.current_language not in (None, "en"):
            for arcname in [
                f"translations/locales/{t.current_language}",
                "translations/codes.json",  # no placeholders; plain string
            ]:
                tar.add(this_dir.parent / arcname, arcname=arcname,
                        recursive=True, filter=tarfile_filter)
            arcname = (
                f"friendly_traceback/locales/{t.current_language}"
                f"/LC_MESSAGES/friendly_tb_{t.current_language}.mo"
            )
            tar.add(Path(site_packages) / arcname, arcname=arcname)

        for root in roots:
            tar.add(
                Path(site_packages) / root,
                arcname=root,
                recursive=True,
                filter=tarfile_filter,
            )

        # Only the minimal pygments lexer modules the core needs.
        for filename in ("__init__.py", "_mapping.py", "python.py"):
            # Fix: redundant str() dropped — str + str is already a str.
            arcname = "pygments/lexers/" + filename
            tar.add(Path(site_packages) / arcname, arcname=arcname)

    print("Done.")