def test_url_assets(asset_type) -> None:
    """`file`/`resource` targets with an `http_source` are fetched and usable at runtime.

    Builds two asset targets from the same URL (one with the default filename, one
    renamed via `filename=`), depends on them from a `pex_binary`, runs it, and checks
    that both files were materialized with the expected names and contents.
    """
    rule_runner = RuleRunner(
        rules=[
            *target_type_rules(),
            *pex_from_targets.rules(),
            *package_pex_binary.rules(),
            *run_pex_binary.rules(),
            *python_target_type_rules.rules(),
            *run.rules(),
        ],
        target_types=[FileTarget, ResourceTarget, PythonSourceTarget, PexBinary],
        objects={"http_source": HTTPSource},
    )
    # Pinned upstream file + digest so the test is deterministic.
    http_source_info = (
        'url="https://raw.githubusercontent.com/python/cpython/7e46ae33bd522cf8331052c3c8835f9366599d8d/Lib/antigravity.py",'
        "len=500,"
        'sha256="8a5ee63e1b79ba2733e7ff4290b6eefea60e7f3a1ccb6bb519535aaf92b44967"'
    )
    # NOTE: use the bare `dedent` consistently (the original mixed `dedent` and
    # `textwrap.dedent` for no reason).
    rule_runner.write_files(
        {
            "assets/BUILD": dedent(
                f"""\
                {asset_type}(
                    name='antigravity',
                    source=http_source(
                        {http_source_info},
                    ),
                )
                {asset_type}(
                    name='antigravity_renamed',
                    source=http_source(
                        {http_source_info},
                        filename="antigravity_renamed.py",
                    ),
                )
                """
            ),
            "app/app.py": dedent(
                """\
                import pathlib
                assets_path = pathlib.Path(__file__).parent.parent / "assets"
                for path in assets_path.iterdir():
                    print(path.name)
                    assert "https://xkcd.com/353/" in path.read_text()
                """
            ),
            "app/BUILD": dedent(
                """\
                python_source(
                    source="app.py",
                    dependencies=[
                        "assets:antigravity",
                        "assets:antigravity_renamed",
                    ]
                )
                pex_binary(name="app.py", entry_point='app.py')
                """
            ),
        }
    )
    with mock_console(rule_runner.options_bootstrapper) as (console, stdout_reader):
        rule_runner.run_goal_rule(
            run.Run,
            args=["app/app.py"],
            env_inherit={"PATH", "PYENV_ROOT", "HOME"},
        )
        stdout = stdout_reader.get_stdout()
        # Both the original and the renamed asset must have been materialized.
        assert "antigravity.py" in stdout
        assert "antigravity_renamed.py" in stdout
def rule_runner() -> RuleRunner:
    """Pytest fixture: a RuleRunner wired up for the count-lines-of-code tests."""
    runner = RuleRunner(
        rules=[
            *count_loc.rules(),
            *external_tool.rules(),
        ],
        target_types=[PythonLibrary, ElixirTarget],
    )
    return runner
def test_archive() -> None:
    """Integration test for the `archive` target type.

    This tests some edges:

    * Using both `files` and `relocated_files`.
    * An `archive` containing another `archive`.
    """
    rule_runner = RuleRunner(
        rules=[
            *target_type_rules(),
            *pex_from_targets.rules(),
            *package_pex_binary.rules(),
            *python_target_type_rules.rules(),
            QueryRule(BuiltPackage, [ArchiveFieldSet]),
        ],
        target_types=[ArchiveTarget, FilesGeneratorTarget, RelocatedFiles, PexBinary],
    )
    rule_runner.set_options([], env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    rule_runner.write_files(
        {
            "resources/d1.json": "{'k': 1}",
            "resources/d2.json": "{'k': 2}",
            "resources/BUILD": dedent(
                """\
                files(name='original_files', sources=['*.json'])
                relocated_files(
                    name='relocated_files',
                    files_targets=[':original_files'],
                    src="resources",
                    dest="data",
                )
                """
            ),
            "project/app.py": "print('hello world!')",
            "project/BUILD": "pex_binary(entry_point='app.py')",
            "BUILD": dedent(
                """\
                archive(
                    name="archive1",
                    packages=["project"],
                    files=["resources:original_files"],
                    format="zip",
                )
                archive(
                    name="archive2",
                    packages=[":archive1"],
                    files=["resources:relocated_files"],
                    format="tar",
                    output_path="output/archive2.tar",
                )
                """
            ),
        }
    )

    def build_archive(target_name: str) -> FileContent:
        # Package the named `archive` target and return its single output file.
        tgt = rule_runner.get_target(Address("", target_name=target_name))
        built = rule_runner.request(BuiltPackage, [ArchiveFieldSet.create(tgt)])
        contents = rule_runner.request(DigestContents, [built.digest])
        assert len(contents) == 1
        return contents[0]

    def check_archive1(zip_bytes: bytes) -> None:
        # The inner zip must hold both resource files plus the built pex.
        with zipfile.ZipFile(BytesIO(zip_bytes)) as zf:
            assert set(zf.namelist()) == {
                "resources/d1.json",
                "resources/d2.json",
                "project/project.pex",
            }
            with zf.open("resources/d1.json", "r") as f:
                assert f.read() == b"{'k': 1}"
            with zf.open("resources/d2.json", "r") as f:
                assert f.read() == b"{'k': 2}"

    check_archive1(build_archive("archive1").content)

    archive2 = build_archive("archive2")
    assert archive2.path == "output/archive2.tar"
    with tarfile.open(fileobj=BytesIO(archive2.content), mode="r:") as tf:
        # The outer tar holds the relocated resources plus the nested zip archive.
        assert set(tf.getnames()) == {"data/d1.json", "data/d2.json", "archive1.zip"}

        def read_member(fp: str) -> bytes:
            member = tf.extractfile(fp)
            assert member is not None
            return member.read()

        assert read_member("data/d1.json") == b"{'k': 1}"
        assert read_member("data/d2.json") == b"{'k': 2}"
        check_archive1(read_member("archive1.zip"))
def test_no_sources_exits_gracefully(rule_runner: RuleRunner) -> None:
    """A target whose `sources` field is empty should yield a no-op, not an error."""
    target_dir = "src/py/foo"
    rule_runner.write_files({f"{target_dir}/BUILD": "python_library(sources=[])"})
    assert rule_runner.run_goal_rule(CountLinesOfCode, args=[target_dir]) == GoalRuleResult.noop()
def rule_runner() -> RuleRunner:
    """Pytest fixture: a RuleRunner that can resolve scoped options for PythonSetup."""
    return RuleRunner(
        rules=[
            SubsystemRule(PythonSetup),
            QueryRule(ScopedOptions, (Scope,)),
        ]
    )
def test_expand_interpreter_search_paths() -> None:
    """The special path markers expand, in order, to the directories they stand for.

    Covers `<PATH>`, `<PEXRC>`, `<ASDF>`, `<ASDF_LOCAL>`, `<PYENV>`, and
    `<PYENV_LOCAL>`, interleaved with literal paths that must pass through untouched.
    """
    pyenv_local_version = "3.5.5"
    python_versions = ["2.7.14", pyenv_local_version, "3.7.10", "3.9.4", "3.9.5"]
    asdf_home_versions = [0, 1, 2]
    asdf_local_versions = [2, 1, 4]
    asdf_local_versions_str = " ".join(
        materialize_indices(python_versions, asdf_local_versions)
    )
    # Version files that pyenv (`.python-version`) and asdf (`.tool-versions`) consult.
    RuleRunner().write_files(
        {
            ".python-version": f"{pyenv_local_version}\n",
            ".tool-versions": (
                "nodejs 16.0.1\n"
                "java current\n"
                f"python {asdf_local_versions_str}\n"
                "rust 1.52.0\n"
            ),
        }
    )
    with setup_pexrc_with_pex_python_path(["/pexrc/path1:/pexrc/path2"]):
        with fake_asdf_root(python_versions, asdf_home_versions, asdf_local_versions) as (
            home_dir,
            asdf_dir,
            expected_asdf_paths,
            expected_asdf_home_paths,
            expected_asdf_local_paths,
        ), fake_pyenv_root(python_versions, pyenv_local_version) as (
            pyenv_root,
            expected_pyenv_paths,
            expected_pyenv_local_paths,
        ):
            search_paths = [
                "/foo",
                "<PATH>",
                "/bar",
                "<PEXRC>",
                "/baz",
                "<ASDF>",
                "<ASDF_LOCAL>",
                "<PYENV>",
                "<PYENV_LOCAL>",
                "/qux",
            ]
            env = Environment(
                {
                    "HOME": home_dir,
                    "PATH": "/env/path1:/env/path2",
                    "PYENV_ROOT": pyenv_root,
                    "ASDF_DATA_DIR": asdf_dir,
                }
            )
            actual = PythonBootstrap._expand_interpreter_search_paths(search_paths, env)
            # Literal entries stay in place; each marker expands in-line, in order.
            expected = [
                "/foo",
                "/env/path1",
                "/env/path2",
                "/bar",
                "/pexrc/path1",
                "/pexrc/path2",
                "/baz",
                *expected_asdf_home_paths,
                *expected_asdf_local_paths,
                *expected_pyenv_paths,
                *expected_pyenv_local_paths,
                "/qux",
            ]
            assert actual == expected
def rule_runner() -> RuleRunner:
    """Pytest fixture: a RuleRunner that understands the `python_requirements` macro."""
    macros = {"python_requirements": PythonRequirements}
    return RuleRunner(
        rules=[QueryRule(Targets, (Specs,))],
        target_types=[PythonRequirementLibrary, PythonRequirementsFile],
        context_aware_object_factories=macros,
    )
def test_thirdparty_plugin(rule_runner: RuleRunner) -> None:
    """MyPy runs with a third-party plugin (django-stubs) and still reports real errors.

    The django-stubs plugin must load (proving plugin support works) and MyPy must
    still flag the genuine type error in `app.py` (`slugify(42)` on line 4).
    """
    # Consistency fix: use a single `write_files` call, like the other tests in this
    # file, instead of the older `add_to_build_file`/`create_file` helpers.
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement_library(
                    name='django',
                    requirements=['Django==2.2.5'],
                )
                """
            ),
            f"{PACKAGE}/settings.py": dedent(
                """\
                from django.urls import URLPattern
                DEBUG = True
                DEFAULT_FROM_EMAIL = "*****@*****.**"
                SECRET_KEY = "not so secret"
                MY_SETTING = URLPattern(pattern="foo", callback=lambda: None)
                """
            ),
            f"{PACKAGE}/app.py": dedent(
                """\
                from django.utils import text
                assert "forty-two" == text.slugify("forty two")
                assert "42" == text.slugify(42)
                """
            ),
            f"{PACKAGE}/BUILD": "python_library()",
        }
    )
    package_tgt = rule_runner.get_target(Address(PACKAGE))
    # Enable the plugin and point it at our fake Django settings module.
    config_content = dedent(
        """\
        [mypy]
        plugins = mypy_django_plugin.main
        [mypy.plugins.django-stubs]
        django_settings_module = project.settings
        """
    )
    result = run_mypy(
        rule_runner,
        [package_tgt],
        config=config_content,
        # Pin versions known to be mutually compatible.
        additional_args=[
            "--mypy-extra-requirements=django-stubs==1.5.0",
            "--mypy-version=mypy==0.770",
        ],
    )
    assert len(result) == 1
    assert result[0].exit_code == 1
    # The only error should be the bad `slugify(42)` call on line 4 of app.py.
    assert "src/python/project/app.py:4" in result[0].stdout