def test_source_override(rule_runner: RuleRunner) -> None:
    """The `source` field should let `poetry_requirements` read a pyproject.toml at a
    custom relative path, with generated targets depending on that file's helper target."""
    # Address of the helper target owning the relocated pyproject.toml.
    file_addr = Address("", target_name="reqs", relative_file_path="subdir/pyproject.toml")
    assert_poetry_requirements(
        rule_runner,
        "poetry_requirements(name='reqs', source='subdir/pyproject.toml')",
        dedent(
            """\
            [tool.poetry.dependencies]
            ansicolors = ">=1.18.0"
            [tool.poetry.dev-dependencies]
            """
        ),
        pyproject_toml_relpath="subdir/pyproject.toml",
        expected_targets={
            PythonRequirementTarget(
                {"dependencies": [file_addr.spec], "requirements": ["ansicolors>=1.18.0"]},
                address=Address("", target_name="reqs", generated_name="ansicolors"),
            ),
            TargetGeneratorSourcesHelperTarget({"sources": ["subdir/pyproject.toml"]}, file_addr),
        },
    )
def test_source_override(rule_runner: RuleRunner) -> None:
    """The `source` field should let `poetry_requirements` read a pyproject.toml from a
    subdirectory, with generated targets depending on the helper file target."""
    file_dep = TargetGeneratorSourcesHelperTarget(
        {"sources": ["subdir/pyproject.toml"]},
        address=Address("", target_name="subdir_pyproject.toml"),
    )
    generated = [
        PythonRequirementTarget(
            {
                "dependencies": [":subdir_pyproject.toml"],
                "requirements": [PipRequirement.parse("ansicolors>=1.18.0")],
            },
            address=Address("", target_name="ansicolors"),
        ),
    ]
    assert_poetry_requirements(
        rule_runner,
        "poetry_requirements(source='subdir/pyproject.toml')",
        dedent(
            """\
            [tool.poetry.dependencies]
            ansicolors = ">=1.18.0"
            [tool.poetry.dev-dependencies]
            """
        ),
        pyproject_toml_relpath="subdir/pyproject.toml",
        expected_file_dep=file_dep,
        expected_targets=generated,
    )
def test_invalid_req(rule_runner: RuleRunner) -> None:
    """Test that we give a nice error message."""
    fake_file_tgt = TargetGeneratorSourcesHelperTarget({"sources": []}, Address("doesnt_matter"))
    cases = [
        # A malformed requirement string: the error should point at file and line.
        (
            "\n\nNot A Valid Req == 3.7",
            "Invalid requirement 'Not A Valid Req == 3.7' in requirements.txt at line 3",
        ),
        # Give a nice error message if it looks like they're using pip VCS-style requirements.
        (
            "git+https://github.com/pypa/pip.git#egg=pip",
            "It looks like you're trying to use a pip VCS-style requirement?",
        ),
    ]
    for requirements_content, expected_msg in cases:
        with pytest.raises(ExecutionError) as exc:
            assert_python_requirements(
                rule_runner,
                "python_requirements()",
                requirements_content,
                expected_file_dep=fake_file_tgt,
                expected_targets=[],
            )
        assert expected_msg in str(exc.value)
def test_supply_python_requirements_file(rule_runner: RuleRunner) -> None:
    """This tests that we can supply our own `_python_requirements_file`."""
    assert_pipenv_requirements(
        rule_runner,
        dedent(
            """
            pipenv_requirements(
                source='custom/pipfile/Pipfile.lock',
                pipfile_target='//:custom_pipfile_target'
            )

            _generator_sources_helper(
                name='custom_pipfile_target',
                sources=['custom/pipfile/Pipfile.lock']
            )
            """
        ),
        # Minimal lockfile payload: one default requirement.
        {"default": {"ansicolors": {"version": ">=1.18.0"}}},
        expected_file_dep=TargetGeneratorSourcesHelperTarget(
            {"sources": ["custom/pipfile/Pipfile.lock"]},
            Address("", target_name="custom_pipfile_target"),
        ),
        expected_targets=[
            PythonRequirementTarget(
                {
                    "requirements": [PipRequirement.parse("ansicolors>=1.18.0")],
                    # The generated target depends on the explicitly supplied helper target.
                    "dependencies": ["//:custom_pipfile_target"],
                },
                Address("", target_name="ansicolors"),
            ),
        ],
        pipfile_lock_relpath="custom/pipfile/Pipfile.lock",
    )
def test_multiple_versions(rule_runner: RuleRunner) -> None:
    """This tests that we correctly create a new python_requirement for each unique dependency
    name in a requirements.txt file, grouping duplicated dependency names to handle multiple
    requirement strings per PEP 508."""
    file_addr = Address("", target_name="reqs", relative_file_path="requirements.txt")
    assert_python_requirements(
        rule_runner,
        "python_requirements(name='reqs')",
        dedent(
            """\
            Django>=3.2
            Django==3.2.7
            confusedmonkey==86
            repletewateringcan>=7
            """
        ),
        expected_targets={
            # Both `Django` lines collapse into a single generated target with two
            # requirement strings.
            PythonRequirementTarget(
                {
                    "requirements": ["Django>=3.2", "Django==3.2.7"],
                    "dependencies": [file_addr.spec],
                },
                Address("", target_name="reqs", generated_name="Django"),
            ),
            PythonRequirementTarget(
                {"requirements": ["confusedmonkey==86"], "dependencies": [file_addr.spec]},
                Address("", target_name="reqs", generated_name="confusedmonkey"),
            ),
            PythonRequirementTarget(
                {"requirements": ["repletewateringcan>=7"], "dependencies": [file_addr.spec]},
                Address("", target_name="reqs", generated_name="repletewateringcan"),
            ),
            TargetGeneratorSourcesHelperTarget({"sources": ["requirements.txt"]}, file_addr),
        },
    )
def test_pipfile_lock(rule_runner: RuleRunner) -> None:
    """This tests that we correctly create a new python_requirement_library for each entry in a
    Pipfile.lock file.

    Edge cases:
    * Develop and Default requirements are used
    * module_mapping works.
    """
    file_addr = Address("", target_name="reqs", relative_file_path="Pipfile.lock")
    assert_pipenv_requirements(
        rule_runner,
        "pipenv_requirements(name='reqs', module_mapping={'ansicolors': ['colors']})",
        {
            "default": {"ansicolors": {"version": ">=1.18.0"}},
            "develop": {
                "cachetools": {
                    "markers": "python_version ~= '3.5'",
                    "version": "==4.1.1",
                    "extras": ["ring", "mongo"],
                }
            },
        },
        expected_targets={
            PythonRequirementTarget(
                {
                    "requirements": ["ansicolors>=1.18.0"],
                    # Mapped via module_mapping above.
                    "modules": ["colors"],
                    "dependencies": [file_addr.spec],
                },
                Address("", target_name="reqs", generated_name="ansicolors"),
            ),
            PythonRequirementTarget(
                {
                    # Extras and environment markers are folded into the requirement string.
                    "requirements": ["cachetools[ring, mongo]==4.1.1;python_version ~= '3.5'"],
                    "dependencies": [file_addr.spec],
                },
                Address("", target_name="reqs", generated_name="cachetools"),
            ),
            TargetGeneratorSourcesHelperTarget({"source": "Pipfile.lock"}, file_addr),
        },
    )
def test_source_override(rule_runner: RuleRunner) -> None:
    """The `source` field should let `python_requirements` read a requirements.txt at a
    custom relative path, with generated targets depending on that file's helper target."""
    file_addr = Address("", target_name="reqs", relative_file_path="subdir/requirements.txt")
    assert_python_requirements(
        rule_runner,
        "python_requirements(name='reqs', source='subdir/requirements.txt')",
        "ansicolors>=1.18.0",
        requirements_txt_relpath="subdir/requirements.txt",
        expected_targets={
            PythonRequirementTarget(
                {"requirements": ["ansicolors>=1.18.0"], "dependencies": [file_addr.spec]},
                Address("", target_name="reqs", generated_name="ansicolors"),
            ),
            TargetGeneratorSourcesHelperTarget({"sources": ["subdir/requirements.txt"]}, file_addr),
        },
    )
def test_no_req_defined_warning(rule_runner: RuleRunner, caplog: Any) -> None:
    """A pyproject.toml with empty dependency sections should generate no targets and
    log a warning rather than erroring."""
    empty_file_dep = TargetGeneratorSourcesHelperTarget(
        {"sources": ["pyproject.toml"]},
        address=Address("", target_name="pyproject.toml"),
    )
    assert_poetry_requirements(
        rule_runner,
        "poetry_requirements()",
        """
        [tool.poetry.dependencies]
        [tool.poetry.dev-dependencies]
        """,
        expected_file_dep=empty_file_dep,
        expected_targets=[],
    )
    assert "No requirements defined" in caplog.text
def test_no_tool_poetry(rule_runner: RuleRunner) -> None:
    """A pyproject.toml lacking any `tool.poetry` section should raise a helpful error."""
    file_dep = TargetGeneratorSourcesHelperTarget(
        {"sources": ["pyproject.toml"]},
        address=Address("", target_name="pyproject.toml"),
    )
    with pytest.raises(ExecutionError) as exc:
        assert_poetry_requirements(
            rule_runner,
            "poetry_requirements()",
            """
            foo = 4
            """,
            expected_file_dep=file_dep,
            expected_targets=[],
        )
    assert "`tool.poetry` found in pyproject.toml" in str(exc.value)
def test_multiple_versions(rule_runner: RuleRunner) -> None:
    """This tests that we correctly create a new python_requirement for each unique dependency
    name in a requirements.txt file, grouping duplicated dependency names to handle multiple
    requirement strings per PEP 508."""
    assert_python_requirements(
        rule_runner,
        "python_requirements()",
        dedent(
            """\
            Django>=3.2
            Django==3.2.7
            confusedmonkey==86
            repletewateringcan>=7
            """
        ),
        expected_file_dep=TargetGeneratorSourcesHelperTarget(
            {"sources": ["requirements.txt"]},
            Address("", target_name="requirements.txt"),
        ),
        expected_targets=[
            # Both `Django` lines collapse into one target holding two parsed requirements.
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [
                        PipRequirement.parse("Django>=3.2"),
                        PipRequirement.parse("Django==3.2.7"),
                    ],
                },
                Address("", target_name="Django"),
            ),
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [PipRequirement.parse("confusedmonkey==86")],
                },
                Address("", target_name="confusedmonkey"),
            ),
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [PipRequirement.parse("repletewateringcan>=7")],
                },
                Address("", target_name="repletewateringcan"),
            ),
        ],
    )
def test_bad_req_type(rule_runner: RuleRunner) -> None:
    """A poetry dependency whose value is not a valid requirement type (here, an int)
    should raise an error naming the offending type."""
    file_dep = TargetGeneratorSourcesHelperTarget(
        {"sources": ["pyproject.toml"]},
        address=Address("", target_name="pyproject.toml"),
    )
    with pytest.raises(ExecutionError) as exc:
        assert_poetry_requirements(
            rule_runner,
            "poetry_requirements()",
            """
            [tool.poetry.dependencies]
            foo = 4
            [tool.poetry.dev-dependencies]
            """,
            expected_file_dep=file_dep,
            expected_targets=[],
        )
    assert "was of type int" in str(exc.value)
def test_no_req_defined_warning(rule_runner: RuleRunner, caplog) -> None:
    """Empty dependency sections should generate only the helper file target and log a
    warning instead of failing."""
    helper_addr = Address("", target_name="reqs", relative_file_path="pyproject.toml")
    helper_tgt = TargetGeneratorSourcesHelperTarget({"sources": ["pyproject.toml"]}, helper_addr)
    assert_poetry_requirements(
        rule_runner,
        "poetry_requirements(name='reqs')",
        """
        [tool.poetry.dependencies]
        [tool.poetry.dev-dependencies]
        """,
        expected_targets={helper_tgt},
    )
    assert "No requirements defined" in caplog.text
def test_non_pep440_error(rule_runner: RuleRunner, caplog: Any) -> None:
    """A version specifier that cannot be converted to PEP 440 should raise an error
    naming the offending entry.

    NOTE(review): `caplog` is unused here — kept for signature compatibility; confirm
    whether it can be dropped.
    """
    file_dep = TargetGeneratorSourcesHelperTarget(
        {"sources": ["pyproject.toml"]},
        address=Address("", target_name="pyproject.toml"),
    )
    with pytest.raises(ExecutionError) as exc:
        assert_poetry_requirements(
            rule_runner,
            "poetry_requirements()",
            """
            [tool.poetry.dependencies]
            foo = "~r62b"
            [tool.poetry.dev-dependencies]
            """,
            expected_file_dep=file_dep,
            expected_targets=[],
        )
    assert 'Failed to parse requirement foo = "~r62b" in pyproject.toml' in str(exc.value)
def test_source_override(rule_runner: RuleRunner) -> None:
    """The `source` field should let `python_requirements` read a requirements.txt from a
    subdirectory, with generated targets depending on the helper file target."""
    file_dep = TargetGeneratorSourcesHelperTarget(
        {"sources": ["subdir/requirements.txt"]},
        Address("", target_name="subdir_requirements.txt"),
    )
    generated = [
        PythonRequirementTarget(
            {
                "dependencies": [":subdir_requirements.txt"],
                "requirements": [PipRequirement.parse("ansicolors>=1.18.0")],
            },
            Address("", target_name="ansicolors"),
        ),
    ]
    assert_python_requirements(
        rule_runner,
        "python_requirements(source='subdir/requirements.txt')",
        "ansicolors>=1.18.0",
        requirements_txt_relpath="subdir/requirements.txt",
        expected_file_dep=file_dep,
        expected_targets=generated,
    )
def test_pipfile_lock(rule_runner: RuleRunner) -> None:
    """This tests that we correctly create a new `python_requirement` target for each entry in a
    Pipfile.lock file.

    Edge cases:
    * Develop and Default requirements are used
    * If a module_mapping is given, and the project is in the map, we set `modules`. It
      works regardless of capitalization.
    """
    assert_pipenv_requirements(
        rule_runner,
        "pipenv_requirements(module_mapping={'ANSIcolors': ['colors']})",
        {
            "default": {"ansicolors": {"version": ">=1.18.0"}},
            "develop": {"cachetools": {"markers": "python_version ~= '3.5'", "version": "==4.1.1"}},
        },
        expected_file_dep=TargetGeneratorSourcesHelperTarget(
            {"sources": ["Pipfile.lock"]}, Address("", target_name="Pipfile.lock")
        ),
        expected_targets=[
            PythonRequirementTarget(
                {
                    "requirements": [PipRequirement.parse("ansicolors>=1.18.0")],
                    "dependencies": [":Pipfile.lock"],
                    # Set because `ANSIcolors` matches `ansicolors` case-insensitively.
                    "modules": ["colors"],
                },
                Address("", target_name="ansicolors"),
            ),
            PythonRequirementTarget(
                {
                    # Markers from the lockfile are folded into the requirement string.
                    "requirements": [
                        PipRequirement.parse("cachetools==4.1.1;python_version ~= '3.5'")
                    ],
                    "dependencies": [":Pipfile.lock"],
                },
                Address("", target_name="cachetools"),
            ),
        ],
    )
def test_properly_creates_extras_requirements(rule_runner: RuleRunner) -> None:
    """This tests the proper parsing of requirements installed with specified extras."""
    assert_pipenv_requirements(
        rule_runner,
        "pipenv_requirements()",
        {
            "default": {"ansicolors": {"version": ">=1.18.0", "extras": ["neon"]}},
            "develop": {
                "cachetools": {
                    "markers": "python_version ~= '3.5'",
                    "version": "==4.1.1",
                    "extras": ["ring", "mongo"],
                }
            },
        },
        expected_file_dep=TargetGeneratorSourcesHelperTarget(
            {"sources": ["Pipfile.lock"]}, Address("", target_name="Pipfile.lock")
        ),
        expected_targets=[
            PythonRequirementTarget(
                {
                    # A single extra is rendered as `project[extra]`.
                    "requirements": [PipRequirement.parse("ansicolors[neon]>=1.18.0")],
                    "dependencies": [":Pipfile.lock"],
                },
                Address("", target_name="ansicolors"),
            ),
            PythonRequirementTarget(
                {
                    # Multiple extras are comma-joined and markers appended.
                    "requirements": [
                        PipRequirement.parse(
                            "cachetools[ring,mongo]==4.1.1;python_version ~= '3.5'"
                        )
                    ],
                    "dependencies": [":Pipfile.lock"],
                },
                Address("", target_name="cachetools"),
            ),
        ],
    )
async def generate_from_pipenv_requirement(
    request: GenerateFromPipenvRequirementsRequest, python_setup: PythonSetup
) -> GeneratedTargets:
    """Generate one `python_requirement` target per entry in a Pipfile.lock, plus a helper
    target owning the lockfile itself.

    Both the `default` and `develop` sections are read; `overrides` entries are matched by
    canonicalized project name and any unused keys raise `InvalidFieldException`.
    """
    generator = request.generator
    lock_rel_path = generator[PipenvSourceField].value
    lock_full_path = generator[PipenvSourceField].file_path
    # Keys are canonicalized so overrides match regardless of name spelling; entries are
    # popped as they are consumed so leftovers can be reported below.
    overrides = {
        canonicalize_project_name(k): v
        for k, v in request.require_unparametrized_overrides().items()
    }

    # Helper target that owns the Pipfile.lock; every generated requirement depends on it.
    file_tgt = TargetGeneratorSourcesHelperTarget(
        {TargetGeneratorSourcesHelperSourcesField.alias: [lock_rel_path]},
        Address(
            generator.address.spec_path,
            target_name=generator.address.target_name,
            relative_file_path=lock_rel_path,
        ),
    )

    digest_contents = await Get(
        DigestContents,
        PathGlobs(
            [lock_full_path],
            # Error eagerly if the lockfile is missing, with a pointer back to this field.
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=f"{generator}'s field `{PipenvSourceField.alias}`",
        ),
    )
    lock_info = json.loads(digest_contents[0].content)

    # Validate the resolve is legal.
    generator[PythonRequirementResolveField].normalized_value(python_setup)

    module_mapping = generator[ModuleMappingField].value
    stubs_mapping = generator[TypeStubsModuleMappingField].value

    # Fields from the generator (tags etc. plus the resolve) are copied onto each
    # generated target unless overridden.
    inherited_fields = {
        field.alias: field.value
        for field in request.generator.field_values.values()
        if isinstance(field, (*COMMON_TARGET_FIELDS, PythonRequirementResolveField))
    }

    def generate_tgt(raw_req: str, info: dict) -> PythonRequirementTarget:
        # Rebuild a PEP 508 requirement string from the lockfile entry:
        # project[extras]version;markers.
        if info.get("extras"):
            raw_req += f"[{','.join(info['extras'])}]"
        raw_req += info.get("version", "")
        if info.get("markers"):
            raw_req += f";{info['markers']}"

        parsed_req = PipRequirement.parse(raw_req)
        normalized_proj_name = canonicalize_project_name(parsed_req.project_name)
        tgt_overrides = overrides.pop(normalized_proj_name, {})
        # An explicit `dependencies` override must still include the lockfile target.
        if Dependencies.alias in tgt_overrides:
            tgt_overrides[Dependencies.alias] = list(tgt_overrides[Dependencies.alias]) + [
                file_tgt.address.spec
            ]

        return PythonRequirementTarget(
            {
                **inherited_fields,
                PythonRequirementsField.alias: [parsed_req],
                PythonRequirementModulesField.alias: module_mapping.get(normalized_proj_name),
                PythonRequirementTypeStubModulesField.alias: stubs_mapping.get(
                    normalized_proj_name
                ),
                # This may get overridden by `tgt_overrides`, which will have already added in
                # the file tgt.
                Dependencies.alias: [file_tgt.address.spec],
                **tgt_overrides,
            },
            generator.address.create_generated(parsed_req.project_name),
        )

    # NOTE: if a project appears in both sections, the `develop` entry wins the dict merge.
    result = tuple(
        generate_tgt(req, info)
        for req, info in {**lock_info.get("default", {}), **lock_info.get("develop", {})}.items()
    ) + (file_tgt,)

    if overrides:
        raise InvalidFieldException(
            f"Unused key in the `overrides` field for {request.generator.address}: "
            f"{sorted(overrides)}"
        )

    return GeneratedTargets(generator, result)
async def generate_from_python_requirement(
    request: GenerateFromPythonRequirementsRequest, python_setup: PythonSetup
) -> GeneratedTargets:
    """Generate one `python_requirement` target per unique project in a requirements.txt,
    plus a helper target owning the file itself.

    Requirement lines with the same project name are grouped into a single target;
    `overrides` entries are matched by canonicalized project name and any unused keys
    raise `InvalidFieldException`.
    """
    generator = request.generator
    requirements_rel_path = generator[PythonRequirementsSourceField].value
    requirements_full_path = generator[PythonRequirementsSourceField].file_path
    # Keys canonicalized so overrides match regardless of spelling; popped as consumed.
    overrides = {
        canonicalize_project_name(k): v
        for k, v in request.require_unparametrized_overrides().items()
    }

    # Helper target that owns the requirements.txt; every generated target depends on it.
    file_tgt = TargetGeneratorSourcesHelperTarget(
        {TargetGeneratorSourcesHelperSourcesField.alias: [requirements_rel_path]},
        Address(
            generator.address.spec_path,
            target_name=generator.address.target_name,
            relative_file_path=requirements_rel_path,
        ),
    )

    digest_contents = await Get(
        DigestContents,
        PathGlobs(
            [requirements_full_path],
            # Error eagerly if the file is missing, pointing back to this field.
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=f"{generator}'s field `{PythonRequirementsSourceField.alias}`",
        ),
    )
    requirements = parse_requirements_file(
        digest_contents[0].content.decode(), rel_path=requirements_full_path
    )
    # NOTE(review): `groupby` without a prior sort only merges *adjacent* lines with the
    # same project name; non-adjacent duplicates would yield duplicate generated names —
    # confirm this matches the intended requirements.txt semantics.
    grouped_requirements = itertools.groupby(
        requirements, lambda parsed_req: parsed_req.project_name
    )

    # Validate the resolve is legal.
    generator[PythonRequirementResolveField].normalized_value(python_setup)

    module_mapping = generator[ModuleMappingField].value
    stubs_mapping = generator[TypeStubsModuleMappingField].value

    # Fields from the generator (tags etc. plus the resolve) are copied onto each
    # generated target unless overridden.
    inherited_fields = {
        field.alias: field.value
        for field in request.generator.field_values.values()
        if isinstance(field, (*COMMON_TARGET_FIELDS, PythonRequirementResolveField))
    }

    def generate_tgt(
        project_name: str, parsed_reqs: Iterable[PipRequirement]
    ) -> PythonRequirementTarget:
        normalized_proj_name = canonicalize_project_name(project_name)
        tgt_overrides = overrides.pop(normalized_proj_name, {})
        # An explicit `dependencies` override must still include the file target.
        if Dependencies.alias in tgt_overrides:
            tgt_overrides[Dependencies.alias] = list(tgt_overrides[Dependencies.alias]) + [
                file_tgt.address.spec
            ]
        return PythonRequirementTarget(
            {
                **inherited_fields,
                PythonRequirementsField.alias: list(parsed_reqs),
                PythonRequirementModulesField.alias: module_mapping.get(normalized_proj_name),
                PythonRequirementTypeStubModulesField.alias: stubs_mapping.get(
                    normalized_proj_name
                ),
                # This may get overridden by `tgt_overrides`, which will have already added in
                # the file tgt.
                Dependencies.alias: [file_tgt.address.spec],
                **tgt_overrides,
            },
            generator.address.create_generated(project_name),
        )

    result = tuple(
        generate_tgt(project_name, parsed_reqs_)
        for project_name, parsed_reqs_ in grouped_requirements
    ) + (file_tgt,)

    if overrides:
        raise InvalidFieldException(
            f"Unused key in the `overrides` field for {request.generator.address}: "
            f"{sorted(overrides)}"
        )

    return GeneratedTargets(generator, result)
async def generate_from_python_requirement(
    request: GenerateFromPoetryRequirementsRequest,
    build_root: BuildRoot,
    union_membership: UnionMembership,
) -> GeneratedTargets:
    """Generate one `python_requirement` target per poetry dependency in a pyproject.toml,
    plus a helper target owning the file itself.

    `overrides` entries are matched by canonicalized project name; unused keys raise
    `InvalidFieldException`.
    """
    generator = request.generator
    pyproject_rel_path = generator[PoetryRequirementsSourceField].value
    pyproject_full_path = generator[PoetryRequirementsSourceField].file_path
    # Keys canonicalized so overrides match regardless of spelling; popped as consumed.
    overrides = {
        canonicalize_project_name(k): v
        for k, v in request.require_unparametrized_overrides().items()
    }

    # Helper target that owns the pyproject.toml; every generated target depends on it.
    file_tgt = TargetGeneratorSourcesHelperTarget(
        {TargetGeneratorSourcesHelperSourcesField.alias: pyproject_rel_path},
        Address(
            request.template_address.spec_path,
            target_name=request.template_address.target_name,
            relative_file_path=pyproject_rel_path,
        ),
        union_membership,
    )

    digest_contents = await Get(
        DigestContents,
        PathGlobs(
            [pyproject_full_path],
            # Error eagerly if the file is missing, pointing back to this field.
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=f"{generator}'s field `{PoetryRequirementsSourceField.alias}`",
        ),
    )
    requirements = parse_pyproject_toml(
        PyProjectToml(
            build_root=PurePath(build_root.path),
            toml_relpath=PurePath(pyproject_full_path),
            toml_contents=digest_contents[0].content.decode(),
        )
    )

    module_mapping = generator[ModuleMappingField].value
    stubs_mapping = generator[TypeStubsModuleMappingField].value

    def generate_tgt(parsed_req: PipRequirement) -> PythonRequirementTarget:
        normalized_proj_name = canonicalize_project_name(parsed_req.project_name)
        tgt_overrides = overrides.pop(normalized_proj_name, {})
        # An explicit `dependencies` override must still include the file target.
        if Dependencies.alias in tgt_overrides:
            tgt_overrides[Dependencies.alias] = list(tgt_overrides[Dependencies.alias]) + [
                file_tgt.address.spec
            ]

        return PythonRequirementTarget(
            {
                **request.template,
                PythonRequirementsField.alias: [parsed_req],
                PythonRequirementModulesField.alias: module_mapping.get(normalized_proj_name),
                PythonRequirementTypeStubModulesField.alias: stubs_mapping.get(
                    normalized_proj_name
                ),
                # This may get overridden by `tgt_overrides`, which will have already added in
                # the file tgt.
                Dependencies.alias: [file_tgt.address.spec],
                **tgt_overrides,
            },
            request.template_address.create_generated(parsed_req.project_name),
            union_membership,
        )

    result = tuple(generate_tgt(requirement) for requirement in requirements) + (file_tgt,)

    if overrides:
        raise InvalidFieldException(
            softwrap(
                f"""
                Unused key in the `overrides` field for {request.template_address}:
                {sorted(overrides)}
                """
            )
        )

    return GeneratedTargets(generator, result)
def test_requirements_txt(rule_runner: RuleRunner) -> None:
    """This tests that we correctly create a new python_requirement for each entry in a
    requirements.txt file, where each dependency is unique.

    Some edge cases:
    * We ignore comments and options (values that start with `--`).
    * module_mapping works regardless of capitalization.
    * Projects get normalized thanks to Requirement.parse().
    * Overrides works, including for dependencies.
    """
    file_addr = Address("", target_name="reqs", relative_file_path="requirements.txt")
    assert_python_requirements(
        rule_runner,
        dedent(
            """\
            python_requirements(
                name='reqs',
                module_mapping={'ansiCOLORS': ['colors']},
                type_stubs_module_mapping={'Django-types': ['django']},
                overrides={
                  "ansicolors": {"tags": ["overridden"]},
                  "Django": {"dependencies": ["#Django-types"]},
                },
            )
            """
        ),
        dedent(
            """\
            # Comment.
            --find-links=https://duckduckgo.com
            ansicolors>=1.18.0
            Django==3.2 ; python_version>'3'
            Django-types
            Un-Normalized-PROJECT # Inline comment.
            pip@ git+https://github.com/pypa/pip.git
            """
        ),
        expected_targets={
            PythonRequirementTarget(
                {
                    "requirements": ["ansicolors>=1.18.0"],
                    "modules": ["colors"],
                    "dependencies": [file_addr.spec],
                    # From the `overrides` field above.
                    "tags": ["overridden"],
                },
                Address("", target_name="reqs", generated_name="ansicolors"),
            ),
            PythonRequirementTarget(
                {
                    "requirements": ["Django==3.2 ; python_version>'3'"],
                    # The override's dependency plus the implicit file dependency.
                    "dependencies": ["#Django-types", file_addr.spec],
                },
                Address("", target_name="reqs", generated_name="Django"),
            ),
            PythonRequirementTarget(
                {
                    "requirements": ["Django-types"],
                    "type_stub_modules": ["django"],
                    "dependencies": [file_addr.spec],
                },
                Address("", target_name="reqs", generated_name="Django-types"),
            ),
            PythonRequirementTarget(
                {
                    # Hyphens are normalized to underscores in the requirement string.
                    "requirements": ["Un_Normalized_PROJECT"],
                    "dependencies": [file_addr.spec],
                },
                Address("", target_name="reqs", generated_name="Un-Normalized-PROJECT"),
            ),
            PythonRequirementTarget(
                {
                    "requirements": ["pip@ git+https://github.com/pypa/pip.git"],
                    "dependencies": [file_addr.spec],
                },
                Address("", target_name="reqs", generated_name="pip"),
            ),
            TargetGeneratorSourcesHelperTarget({"source": "requirements.txt"}, file_addr),
        },
    )
def test_requirements_txt(rule_runner: RuleRunner) -> None:
    """This tests that we correctly create a new python_requirement for each entry in a
    requirements.txt file, where each dependency is unique.

    Some edge cases:
    * We ignore comments and options (values that start with `--`).
    * If a module_mapping is given, and the project is in the map, we copy over a subset of the
      mapping to the created target. It works regardless of capitalization.
    * Projects get normalized thanks to Requirement.parse().
    """
    assert_python_requirements(
        rule_runner,
        dedent(
            """\
            python_requirements(
                module_mapping={'ansiCOLORS': ['colors']},
                type_stubs_module_mapping={'Django-types': ['django']},
            )
            """
        ),
        dedent(
            """\
            # Comment.
            --find-links=https://duckduckgo.com
            ansicolors>=1.18.0
            Django==3.2 ; python_version>'3'
            Django-types
            Un-Normalized-PROJECT # Inline comment.
            pip@ git+https://github.com/pypa/pip.git
            """
        ),
        expected_file_dep=TargetGeneratorSourcesHelperTarget(
            {"sources": ["requirements.txt"]},
            Address("", target_name="requirements.txt"),
        ),
        expected_targets=[
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [PipRequirement.parse("ansicolors>=1.18.0")],
                    # Mapped via module_mapping, case-insensitively.
                    "modules": ["colors"],
                },
                Address("", target_name="ansicolors"),
            ),
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [PipRequirement.parse("Django==3.2 ; python_version>'3'")],
                },
                Address("", target_name="Django"),
            ),
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [PipRequirement.parse("Django-types")],
                    "type_stub_modules": ["django"],
                },
                Address("", target_name="Django-types"),
            ),
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    # Hyphens are normalized to underscores in the parsed requirement.
                    "requirements": [PipRequirement.parse("Un_Normalized_PROJECT")],
                },
                Address("", target_name="Un-Normalized-PROJECT"),
            ),
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [
                        PipRequirement.parse("pip@ git+https://github.com/pypa/pip.git")
                    ],
                },
                Address("", target_name="pip"),
            ),
        ],
    )
def test_generate_package_targets(rule_runner: RuleRunner) -> None:
    """A `go_mod` target should generate a helper target per `go.mod`/`go.sum` file and one
    `go_third_party_package` target per third-party package (including transitive
    subpackages), each depending on both helper file targets."""
    rule_runner.write_files(
        {
            "src/go/BUILD": "go_mod()\n",
            "src/go/go.mod": dedent(
                """\
                module example.com/src/go
                go 1.17

                require (
                    github.com/google/go-cmp v0.4.0
                    github.com/google/uuid v1.2.0
                    golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 // indirect
                )
                """
            ),
            "src/go/go.sum": dedent(
                """\
                github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4=
                github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
                github.com/google/uuid v1.2.0 h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs=
                github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
                golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
                golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
                """
            ),
            "src/go/hello.go": "",
            "src/go/subdir/f.go": "",
            "src/go/subdir/f2.go": "",
            "src/go/another_dir/subdir/f.go": "",
        }
    )
    generated = rule_runner.request(
        _TargetParametrizations,
        [_TargetParametrizationsRequest(Address("src/go"), description_of_origin="tests")],
    ).parametrizations

    # One helper target per module metadata file.
    file_tgts = [
        TargetGeneratorSourcesHelperTarget(
            {TargetGeneratorSourcesHelperSourcesField.alias: fp},
            Address("src/go", relative_file_path=fp),
        )
        for fp in ("go.mod", "go.sum")
    ]

    def gen_third_party_tgt(import_path: str) -> GoThirdPartyPackageTarget:
        # Every third-party package target depends on both go.mod and go.sum.
        return GoThirdPartyPackageTarget(
            {
                GoImportPathField.alias: import_path,
                Dependencies.alias: [t.address.spec for t in file_tgts],
            },
            Address("src/go", generated_name=import_path),
        )

    all_third_party = {
        gen_third_party_tgt(pkg)
        for pkg in (
            "github.com/google/uuid",
            "github.com/google/go-cmp/cmp",
            "github.com/google/go-cmp/cmp/cmpopts",
            "github.com/google/go-cmp/cmp/internal/diff",
            "github.com/google/go-cmp/cmp/internal/flags",
            "github.com/google/go-cmp/cmp/internal/function",
            "github.com/google/go-cmp/cmp/internal/testprotos",
            "github.com/google/go-cmp/cmp/internal/teststructs",
            "github.com/google/go-cmp/cmp/internal/value",
            "golang.org/x/xerrors",
            "golang.org/x/xerrors/internal",
        )
    }
    assert set(generated.values()) == {*file_tgts, *all_third_party}
def test_pyproject_toml(rule_runner: RuleRunner) -> None:
    """This tests that we correctly create a new python_requirement for each entry in a
    pyproject.toml file.

    Note that this just ensures proper targets are created; see prior tests for specific
    parsing edge cases.
    """
    assert_poetry_requirements(
        rule_runner,
        dedent(
            """\
            poetry_requirements(
                # module_mapping should work regardless of capitalization.
                module_mapping={'ansiCOLORS': ['colors']},
                type_stubs_module_mapping={'Django-types': ['django']},
            )
            """
        ),
        dedent(
            """\
            [tool.poetry.dependencies]
            Django = {version = "3.2", python = "3"}
            Django-types = "2"
            Un-Normalized-PROJECT = "1.0.0"
            [tool.poetry.dev-dependencies]
            ansicolors = ">=1.18.0"
            """
        ),
        expected_file_dep=TargetGeneratorSourcesHelperTarget(
            {"sources": ["pyproject.toml"]},
            address=Address("", target_name="pyproject.toml"),
        ),
        expected_targets=[
            PythonRequirementTarget(
                {
                    "dependencies": [":pyproject.toml"],
                    "requirements": [PipRequirement.parse("ansicolors>=1.18.0")],
                    "modules": ["colors"],
                },
                address=Address("", target_name="ansicolors"),
            ),
            PythonRequirementTarget(
                {
                    "dependencies": [":pyproject.toml"],
                    # The poetry `python = "3"` constraint becomes an environment marker.
                    "requirements": [PipRequirement.parse("Django==3.2 ; python_version == '3'")],
                },
                address=Address("", target_name="Django"),
            ),
            PythonRequirementTarget(
                {
                    "dependencies": [":pyproject.toml"],
                    "requirements": [PipRequirement.parse("Django-types==2")],
                    "type_stub_modules": ["django"],
                },
                address=Address("", target_name="Django-types"),
            ),
            PythonRequirementTarget(
                {
                    "dependencies": [":pyproject.toml"],
                    "requirements": [PipRequirement.parse("Un_Normalized_PROJECT == 1.0.0")],
                },
                address=Address("", target_name="Un-Normalized-PROJECT"),
            ),
        ],
    )
def gen_file_tgt(fp: str) -> TargetGeneratorSourcesHelperTarget:
    """Build the helper target owning the generator's source file at relative path `fp`."""
    field_values = {TargetGeneratorSourcesHelperSourcesField.alias: fp}
    file_address = generator_addr.create_file(fp)
    return TargetGeneratorSourcesHelperTarget(field_values, file_address, union_membership)