def test_steps():
    # Run every step's test entries and check the resulting transcript
    # against the golden transcript for the current language.
    lang = os.environ.get("FUTURECODER_LANGUAGE", "en")
    t.set_language(lang)
    list(load_chapters())
    runner = FullRunner(filename="/my_program.py")
    transcript = []
    for page, step, substep, entry in step_test_entries():
        program = substep.program
        is_message = substep != step

        output_parts = []
        input_callback = make_test_input_callback(step.stdin_input)

        def callback(event_type, data):
            # Feed scripted stdin to the runner and collect its output parts.
            if event_type == "input":
                return input_callback(data)
            elif event_type == "output":
                output_parts.extend(data["parts"])

        step.pre_run(runner)
        response = check_entry(entry, callback, runner)
        response["output_parts"] = output_parts
        normalise_response(response, is_message, substep)

        transcript_item = dict(
            program=program.splitlines(),
            page=page.title,
            step=step.__name__,
            response=response,
        )
        transcript.append(transcript_item)

        if step.get_solution and not is_message:
            get_solution = "".join(step.get_solution["tokens"])
            assert "def solution(" not in get_solution
            assert "returns_stdout" not in get_solution
            # The displayed solution must appear verbatim in the tested program.
            assert get_solution.strip() in program
            transcript_item["get_solution"] = get_solution.splitlines()

            if step.parsons_solution:
                is_function = transcript_item["get_solution"][0].startswith("def ")
                assert len(step.get_solution["lines"]) >= 4 + is_function

        assert response["passed"] == (not is_message)

    dirpath = Path(__file__).parent / "golden_files" / lang
    dirpath.mkdir(parents=True, exist_ok=True)
    path = dirpath / "test_transcript.json"
    if os.environ.get("FIX_TESTS", 0):
        # Regenerate the golden file instead of comparing against it.
        dump = json.dumps(transcript, indent=4, sort_keys=True)
        path.write_text(dump)
    else:
        assert transcript == json.loads(path.read_text())

def main():
    print("Generating files...")
    t.set_language(os.environ.get("FUTURECODER_LANGUAGE", "en"))

    # Book data consumed directly by the frontend.
    json_to_file(list(load_chapters()), frontend_src / "chapters.json")
    json_to_file(get_pages(), frontend_src / "book/pages.json.load_by_url")
    json_to_file(dict(frontend_terms()), frontend_src / "terms.json", indent=4)

    # Copy birdseye's static assets into the frontend's public directory.
    birdseye_dest = frontend / "public/birdseye"
    shutil.rmtree(birdseye_dest, ignore_errors=True)
    shutil.copytree(Path(birdseye.__file__).parent / "static", birdseye_dest, dirs_exist_ok=True)

    # core_imports.txt lists the top-level packages bundled into the tar below;
    # it must stay in sync with get_roots().
    roots = get_roots()
    core_imports = "\n".join(roots)
    core_imports_path = this_dir / "core_imports.txt"
    if os.environ.get("FIX_CORE_IMPORTS"):
        core_imports_path.write_text(core_imports)
    elif core_imports_path.read_text() != core_imports:
        raise ValueError(
            f"core_imports.txt is out of date, run with FIX_CORE_IMPORTS=1.\n"
            f"{core_imports}\n!=\n{core_imports_path.read_text()}"
        )

    # Bundle the Python core, translations, and required site-packages
    # into the tar that the frontend loads.
    with tarfile.open(frontend_src / "python_core.tar.load_by_url", "w") as tar:
        tar.add(this_dir, arcname=this_dir.stem, recursive=True, filter=tarfile_filter)

        if t.current_language not in (None, "en"):
            for arcname in [
                f"translations/locales/{t.current_language}",
                f"translations/codes.json",
            ]:
                tar.add(this_dir.parent / arcname, arcname=arcname, recursive=True, filter=tarfile_filter)
            arcname = f"friendly_traceback/locales/{t.current_language}/LC_MESSAGES/friendly_tb_{t.current_language}.mo"
            tar.add(Path(site_packages) / arcname, arcname=arcname)

        for root in roots:
            tar.add(
                Path(site_packages) / root,
                arcname=root,
                recursive=True,
                filter=tarfile_filter,
            )

        # Only the pygments lexer modules needed for Python highlighting.
        for filename in ("__init__.py", "_mapping.py", "python.py"):
            arcname = str("pygments/lexers/" + filename)
            tar.add(Path(site_packages) / arcname, arcname=arcname)

    print("Done.")

def test_set_language_cookie_not_accepted(monkeypatch, req):
    req.cookies["language"] = "no"
    monkeypatch.setattr(core.config, "languages", ["en", "de"])
    assert set_language(req) == "en"

def test_set_language_accept_header_not_accepted(monkeypatch, req):
    req.headers["accept-language"] = "no"
    monkeypatch.setattr(core.config, "languages", ["en", "de"])
    assert set_language(req) == "en"

def test_set_language_default_multiple(monkeypatch, req):
    monkeypatch.setattr(core.config, "languages", ["en", "de"])
    assert set_language(req) == "en"

def set_lang(req, *args):
    # Helper that ignores any extra positional arguments and just applies set_language.
    set_language(req)