def test_dependency_inference(rule_runner: RuleRunner, caplog) -> None:
    """End-to-end check of shell `source` dependency inference.

    Covers: a normal cross-directory dep, an unresolvable `source` (silently skipped),
    and ambiguous deps where only a `!`-ignore disambiguates.
    """
    rule_runner.write_files({
        "a/f1.sh": dedent("""\
            source b/f.sh
            source unknown/f.sh
            """),
        "a/f2.sh": "source a/f1.sh",
        "a/BUILD": "shell_sources()",
        "b/f.sh": "",
        "b/BUILD": "shell_sources()",
        # Test handling of ambiguous imports. We should warn on the ambiguous dependency, but
        # not warn on the disambiguated one and should infer a dep.
        "ambiguous/dep.sh": "",
        "ambiguous/disambiguated.sh": "",
        "ambiguous/main.sh": dedent("""\
            source ambiguous/dep.sh
            source ambiguous/disambiguated.sh
            """),
        "ambiguous/BUILD": dedent("""\
            shell_sources(name='dep1', sources=['dep.sh', 'disambiguated.sh'])
            shell_sources(name='dep2', sources=['dep.sh', 'disambiguated.sh'])
            shell_sources(
                name='main',
                sources=['main.sh'],
                dependencies=['!./disambiguated.sh:dep2'],
            )
            """),
    })

    def run_dep_inference(address: Address) -> InferredDependencies:
        # Helper: run the inference rule for the ShellSourceField of `address`.
        tgt = rule_runner.get_target(address)
        return rule_runner.request(
            InferredDependencies, [InferShellDependencies(tgt[ShellSourceField])])

    # `b/f.sh` resolves; `unknown/f.sh` has no owner and is dropped without error.
    assert run_dep_inference(Address(
        "a", relative_file_path="f1.sh")) == InferredDependencies(
            [Address("b", relative_file_path="f.sh")])

    caplog.clear()
    # Only the disambiguated file is inferred; the truly ambiguous one produces one warning.
    assert run_dep_inference(
        Address("ambiguous", target_name="main",
                relative_file_path="main.sh")) == InferredDependencies([
                    Address("ambiguous", target_name="dep1",
                            relative_file_path="disambiguated.sh")
                ])
    assert len(caplog.records) == 1
    assert "The target ambiguous/main.sh:main sources `ambiguous/dep.sh`" in caplog.text
    assert "['ambiguous/dep.sh:dep1', 'ambiguous/dep.sh:dep2']" in caplog.text
    assert "disambiguated.sh" not in caplog.text
async def infer_protobuf_dependencies(
    request: InferProtobufDependencies, protobuf_mapping: ProtobufMapping, protoc: Protoc
) -> InferredDependencies:
    """Infer dependencies for a Protobuf source by parsing its `import` statements.

    An unambiguous import maps directly to its owning address; an ambiguous one is only
    used when the target's explicit `!`-ignores disambiguate it, and otherwise warns.
    """
    if not protoc.dependency_inference:
        return InferredDependencies([], sibling_dependencies_inferrable=False)

    address = request.sources_field.address
    wrapped_tgt = await Get(WrappedTarget, Address, address)
    explicitly_provided_deps, hydrated_sources = await MultiGet(
        Get(ExplicitlyProvidedDependencies, DependenciesRequest(wrapped_tgt.target[Dependencies])),
        Get(HydratedSources, HydrateSourcesRequest(request.sources_field)),
    )
    digest_contents = await Get(DigestContents, Digest, hydrated_sources.snapshot.digest)

    result: OrderedSet[Address] = OrderedSet()
    for file_content in digest_contents:
        for import_path in parse_proto_imports(file_content.content.decode()):
            unambiguous = protobuf_mapping.mapping.get(import_path)
            ambiguous = protobuf_mapping.ambiguous_modules.get(import_path)
            if unambiguous:
                result.add(unambiguous)
            elif ambiguous:
                explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
                    ambiguous,
                    address,
                    import_reference="file",
                    context=(
                        f"The target {address} imports `{import_path}` in the file "
                        f"{file_content.path}"
                    ),
                )
                maybe_disambiguated = explicitly_provided_deps.disambiguated_via_ignores(ambiguous)
                if maybe_disambiguated:
                    result.add(maybe_disambiguated)
    return InferredDependencies(sorted(result), sibling_dependencies_inferrable=True)
async def infer_python_dependencies(
    request: InferPythonDependencies, python_inference: PythonInference) -> InferredDependencies:
    """Infer dependencies for Python sources by mapping their imports to owning targets."""
    if not python_inference.imports:
        return InferredDependencies([], sibling_dependencies_inferrable=False)

    stripped_sources = await Get(StrippedSourceFiles, SourceFilesRequest([request.sources_field]))
    # Source-root-stripped paths let us derive each file's own module name.
    modules = tuple(
        PythonModule.create_from_stripped_path(PurePath(fp))
        for fp in stripped_sources.snapshot.files)
    digest_contents = await Get(DigestContents, Digest, stripped_sources.snapshot.digest)

    owner_requests: List[Get[PythonModuleOwner, PythonModule]] = []
    for file_content, module in zip(digest_contents, modules):
        file_imports_obj = find_python_imports(
            file_content.content.decode(), module_name=module.module)
        # String imports (e.g. `importlib` strings) are only considered when the option is on.
        detected_imports = (file_imports_obj.all_imports
                            if python_inference.string_imports
                            else file_imports_obj.explicit_imports)
        owner_requests.extend(
            Get(PythonModuleOwner, PythonModule(imported_module))
            for imported_module in detected_imports
            if imported_module not in combined_stdlib)

    owner_per_import = await MultiGet(owner_requests)
    # Drop unowned imports and self-dependencies.
    result = (
        owner.address
        for owner in owner_per_import
        if owner.address and owner.address != request.sources_field.address)
    return InferredDependencies(result, sibling_dependencies_inferrable=True)
async def infer_thrift_dependencies(
    request: InferThriftDependencies, thrift_mapping: ThriftMapping, thrift: ThriftSubsystem
) -> InferredDependencies:
    """Infer dependencies for a Thrift source from its parsed imports.

    Unambiguous imports are added directly; ambiguous ones warn and are only added
    when the target's explicit `!`-ignores disambiguate them.
    """
    if not thrift.dependency_inference:
        return InferredDependencies([])
    address = request.sources_field.address
    wrapped_tgt = await Get(WrappedTarget, Address, address)
    explicitly_provided_deps, parsed_thrift = await MultiGet(
        Get(ExplicitlyProvidedDependencies, DependenciesRequest(wrapped_tgt.target[Dependencies])),
        Get(ParsedThrift, ParsedThriftRequest(request.sources_field)),
    )
    result: OrderedSet[Address] = OrderedSet()
    for import_path in parsed_thrift.imports:
        unambiguous = thrift_mapping.mapping.get(import_path)
        ambiguous = thrift_mapping.ambiguous_modules.get(import_path)
        if unambiguous:
            result.add(unambiguous)
        elif ambiguous:
            explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
                ambiguous,
                address,
                import_reference="file",
                context=(
                    f"The target {address} imports `{import_path}` in the file "
                    f"{wrapped_tgt.target[ThriftSourceField].file_path}"
                ),
            )
            maybe_disambiguated = explicitly_provided_deps.disambiguated(ambiguous)
            if maybe_disambiguated:
                result.add(maybe_disambiguated)
    return InferredDependencies(sorted(result))
async def infer_python_dependencies_via_imports(
    request: InferPythonImportDependencies,
    python_infer_subsystem: PythonInferSubsystem,
    python_setup: PythonSetup,
) -> InferredDependencies:
    """Infer Python deps by parsing imports under the target's interpreter constraints."""
    if not python_infer_subsystem.imports:
        return InferredDependencies([], sibling_dependencies_inferrable=False)

    wrapped_tgt = await Get(WrappedTarget, Address, request.sources_field.address)
    detected_imports = await Get(
        ParsedPythonImports,
        ParsePythonImportsRequest(
            request.sources_field,
            PexInterpreterConstraints.create_from_targets([wrapped_tgt.target], python_setup),
        ),
    )
    # String imports are only considered when the option is enabled.
    relevant_imports = (detected_imports.all_imports
                        if python_infer_subsystem.string_imports
                        else detected_imports.explicit_imports)
    owners_per_import = await MultiGet(
        Get(PythonModuleOwners, PythonModule(imported_module))
        for imported_module in relevant_imports
        if imported_module not in combined_stdlib)
    # Flatten the owners of every import into one sorted, de-duplicated list.
    merged_result = sorted(
        set(itertools.chain.from_iterable(owners_per_import)))
    return InferredDependencies(merged_result, sibling_dependencies_inferrable=True)
async def infer_python_dependencies(
    request: InferPythonDependencies, python_inference: PythonInference
) -> InferredDependencies:
    """Infer dependencies for Python sources from their explicit imports only."""
    if not python_inference.imports:
        return InferredDependencies()

    stripped_sources = await Get(StrippedSourceFiles, SourceFilesRequest([request.sources_field]))
    # Source-root-stripped paths let us derive each file's own module name.
    modules = tuple(
        PythonModule.create_from_stripped_path(PurePath(fp))
        for fp in stripped_sources.snapshot.files
    )
    digest_contents = await Get(DigestContents, Digest, stripped_sources.snapshot.digest)
    imports_per_file = tuple(
        find_python_imports(file_content.content.decode(), module_name=module.module)
        for file_content, module in zip(digest_contents, modules)
    )
    owner_per_import = await MultiGet(
        Get(PythonModuleOwner, PythonModule(imported_module))
        for file_imports in imports_per_file
        for imported_module in file_imports.explicit_imports
        if imported_module not in combined_stdlib
    )
    # Drop unowned imports and self-dependencies (after normalizing to the base target).
    return InferredDependencies(
        owner.address
        for owner in owner_per_import
        if (
            owner.address
            and owner.address.maybe_convert_to_base_target() != request.sources_field.address
        )
    )
async def infer_go_dependencies(
    request: InferGoProtobufDependenciesRequest,
    go_protobuf_mapping: GoProtobufImportPathMapping,
) -> InferredDependencies:
    """Infer deps of a Go package on Protobuf-derived Go packages via its import paths."""
    address = request.sources_field.address
    maybe_pkg_analysis = await Get(
        FallibleFirstPartyPkgAnalysis, FirstPartyPkgAnalysisRequest(address))
    if maybe_pkg_analysis.analysis is None:
        # Analysis failure is logged but does not fail the build: we simply infer nothing.
        _logger.error(
            softwrap(f"""
                Failed to analyze {maybe_pkg_analysis.import_path} for dependency inference:

                {maybe_pkg_analysis.stderr}
                """))
        return InferredDependencies([])
    pkg_analysis = maybe_pkg_analysis.analysis

    inferred_dependencies: list[Address] = []
    # Consider regular, test, and external-test ("xtest") imports alike.
    for import_path in (
        *pkg_analysis.imports,
        *pkg_analysis.test_imports,
        *pkg_analysis.xtest_imports,
    ):
        candidate_addresses = go_protobuf_mapping.mapping.get(import_path, ())
        inferred_dependencies.extend(candidate_addresses)

    return InferredDependencies(inferred_dependencies)
def test_infer_java_imports_ambiguous(rule_runner: RuleRunner, caplog) -> None:
    """Ambiguous Java imports infer nothing (with a warning) unless `!`-ignores disambiguate."""
    # The same package/class provided by two different targets makes the symbol ambiguous.
    ambiguous_source = dedent("""\
        package org.pantsbuild.a;
        public class A {}
        """)
    rule_runner.write_files({
        "a_one/BUILD": "java_sources()",
        "a_one/A.java": ambiguous_source,
        "a_two/BUILD": "java_sources()",
        "a_two/A.java": ambiguous_source,
        "b/BUILD": "java_sources()",
        "b/B.java": dedent("""\
            package org.pantsbuild.b;
            import org.pantsbuild.a.A;
            public class B {}
            """),
        "c/BUILD": dedent("""\
            java_sources(
                dependencies=["!a_two/A.java"],
            )
            """),
        "c/C.java": dedent("""\
            package org.pantsbuild.c;
            import org.pantsbuild.a.A;
            public class C {}
            """),
    })
    target_b = rule_runner.get_target(Address("b", relative_file_path="B.java"))
    target_c = rule_runner.get_target(Address("c", relative_file_path="C.java"))

    # Because there are two sources of `org.pantsbuild.a.A`, neither should be inferred for B. But C
    # disambiguates with a `!`, and so gets the appropriate version.
    caplog.clear()
    assert rule_runner.request(
        InferredDependencies, [InferJavaSourceDependencies(target_b[JavaSourceField])
                               ]) == InferredDependencies(dependencies=[])
    assert len(caplog.records) == 1
    assert (
        "The target b/B.java imports `org.pantsbuild.a.A`, but Pants cannot safely"
        in caplog.text)

    assert rule_runner.request(
        InferredDependencies, [InferJavaSourceDependencies(target_c[JavaSourceField])
                               ]) == InferredDependencies(
                                   dependencies=[Address("a_one", relative_file_path="A.java")])
def test_infer_python_inits() -> None:
    """With `--python-infer-inits`, all ancestor `__init__.py(i)` files become inferred deps."""
    rule_runner = RuleRunner(
        rules=[
            *ancestor_files.rules(),
            *target_types_rules.rules(),
            *core_target_types_rules(),
            infer_python_init_dependencies,
            SubsystemRule(PythonInferSubsystem),
            QueryRule(InferredDependencies, (InferInitDependencies, )),
        ],
        target_types=[PythonSourcesGeneratorTarget],
    )
    rule_runner.set_options(
        ["--python-infer-inits", "--source-root-patterns=src/python"],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    rule_runner.write_files({
        "src/python/root/__init__.py": "",
        "src/python/root/BUILD": "python_sources()",
        "src/python/root/mid/__init__.py": "",
        "src/python/root/mid/BUILD": "python_sources()",
        "src/python/root/mid/leaf/__init__.py": "",
        "src/python/root/mid/leaf/f.py": "",
        "src/python/root/mid/leaf/BUILD": "python_sources()",
        "src/python/type_stub/__init__.pyi": "",
        "src/python/type_stub/foo.pyi": "",
        "src/python/type_stub/BUILD": "python_sources()",
    })

    def run_dep_inference(address: Address) -> InferredDependencies:
        # Helper: run init-file inference for the PythonSourceField of `address`.
        target = rule_runner.get_target(address)
        return rule_runner.request(
            InferredDependencies,
            [InferInitDependencies(target[PythonSourceField])],
        )

    # Every ancestor __init__.py up to the source root is inferred.
    assert run_dep_inference(
        Address(
            "src/python/root/mid/leaf",
            relative_file_path="f.py")) == InferredDependencies([
                Address("src/python/root", relative_file_path="__init__.py"),
                Address("src/python/root/mid", relative_file_path="__init__.py"),
                Address("src/python/root/mid/leaf", relative_file_path="__init__.py"),
            ], )
    # `.pyi` stubs likewise pick up ancestor `__init__.pyi` files.
    assert run_dep_inference(
        Address("src/python/type_stub",
                relative_file_path="foo.pyi")) == InferredDependencies([
                    Address("src/python/type_stub", relative_file_path="__init__.pyi")
                ])
async def infer_python_dependencies_via_imports(
    request: InferPythonImportDependencies,
    python_infer_subsystem: PythonInferSubsystem,
    python_setup: PythonSetup,
) -> InferredDependencies:
    """Infer Python deps from imports, warning on ambiguous owners unless ignores disambiguate."""
    if not python_infer_subsystem.imports:
        return InferredDependencies([], sibling_dependencies_inferrable=False)

    wrapped_tgt = await Get(WrappedTarget, Address, request.sources_field.address)
    explicitly_provided_deps, detected_imports = await MultiGet(
        Get(ExplicitlyProvidedDependencies, DependenciesRequest(wrapped_tgt.target[Dependencies])),
        Get(
            ParsedPythonImports,
            ParsePythonImportsRequest(
                request.sources_field,
                PexInterpreterConstraints.create_from_targets(
                    [wrapped_tgt.target], python_setup),
            ),
        ),
    )

    # String imports are only considered when the option is enabled; stdlib modules are skipped.
    relevant_imports = tuple(
        imp
        for imp in (detected_imports.all_imports if python_infer_subsystem.
                    string_imports else detected_imports.explicit_imports)
        if imp not in combined_stdlib)

    owners_per_import = await MultiGet(
        Get(PythonModuleOwners, PythonModule(imported_module))
        for imported_module in relevant_imports)
    merged_result: set[Address] = set()
    for owners, imp in zip(owners_per_import, relevant_imports):
        # Unambiguous owners are taken directly; ambiguous ones warn and are only
        # used when the target's explicit `!`-ignores disambiguate them.
        merged_result.update(owners.unambiguous)
        address = wrapped_tgt.target.address
        explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
            owners.ambiguous,
            address,
            import_reference="module",
            context=f"The target {address} imports `{imp}`",
        )
        maybe_disambiguated = explicitly_provided_deps.disambiguated_via_ignores(
            owners.ambiguous)
        if maybe_disambiguated:
            merged_result.add(maybe_disambiguated)

    return InferredDependencies(sorted(merged_result), sibling_dependencies_inferrable=True)
async def infer_scala_dependencies_via_source_analysis(
    request: InferScalaSourceDependencies,
    scala_infer_subsystem: ScalaInferSubsystem,
    jvm: JvmSubsystem,
    first_party_symbol_map: FirstPartySymbolMapping,
    third_party_artifact_mapping: ThirdPartyPackageToArtifactMapping,
) -> InferredDependencies:
    """Infer Scala deps by matching imported/consumed symbols against 1st/3rd-party maps."""
    if not scala_infer_subsystem.imports:
        return InferredDependencies([])

    address = request.sources_field.address
    wrapped_tgt = await Get(WrappedTarget, Address, address)
    tgt = wrapped_tgt.target
    explicitly_provided_deps, analysis = await MultiGet(
        Get(ExplicitlyProvidedDependencies, DependenciesRequest(tgt[Dependencies])),
        Get(ScalaSourceDependencyAnalysis, SourceFilesRequest([request.sources_field])),
    )

    # Collect candidate symbols: imports always (guarded above), consumed types optionally.
    symbols: OrderedSet[str] = OrderedSet()
    if scala_infer_subsystem.imports:
        symbols.update(analysis.all_imports())
    if scala_infer_subsystem.consumed_types:
        symbols.update(analysis.fully_qualified_consumed_symbols())

    # Matches are restricted to the target's JVM resolve.
    resolve = tgt[JvmResolveField].normalized_value(jvm)

    dependencies: OrderedSet[Address] = OrderedSet()
    for symbol in symbols:
        first_party_matches = first_party_symbol_map.symbols.addresses_for_symbol(
            symbol, resolve=resolve
        )
        third_party_matches = third_party_artifact_mapping.addresses_for_symbol(symbol, resolve)
        matches = first_party_matches.union(third_party_matches)
        if not matches:
            continue
        # NOTE(review): even a single match goes through the ambiguity/disambiguation path;
        # `disambiguated` presumably returns it directly — confirm against its implementation.
        explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
            matches,
            address,
            import_reference="type",
            context=f"The target {address} imports `{symbol}`",
        )
        maybe_disambiguated = explicitly_provided_deps.disambiguated(matches)
        if maybe_disambiguated:
            dependencies.add(maybe_disambiguated)

    return InferredDependencies(dependencies)
def test_infer_scala_imports_with_cycle(rule_runner: RuleRunner) -> None:
    """Two Scala files that import each other should each infer a dep on the other."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                scala_sources(
                    name = 'a',
                )
                """
            ),
            "A.scala": dedent(
                """\
                package org.pantsbuild.a
                import org.pantsbuild.b.B
                class A {}
                """
            ),
            "sub/BUILD": dedent(
                """\
                scala_sources(
                    name = 'b',
                )
                """
            ),
            "sub/B.scala": dedent(
                """\
                package org.pantsbuild.b
                import org.pantsbuild.a.A
                class B {}
                """
            ),
        }
    )

    def infer(tgt):
        # Run Scala source dependency inference for a single target.
        return rule_runner.request(
            InferredDependencies, [InferScalaSourceDependencies(tgt[ScalaSourceField])]
        )

    tgt_root = rule_runner.get_target(Address("", target_name="a", relative_file_path="A.scala"))
    tgt_sub = rule_runner.get_target(Address("sub", target_name="b", relative_file_path="B.scala"))

    # The cycle is inferred in both directions.
    assert infer(tgt_root) == InferredDependencies(dependencies=[tgt_sub.address])
    assert infer(tgt_sub) == InferredDependencies(dependencies=[tgt_root.address])
def test_infer_unqualified_symbol_from_intermediate_scope(rule_runner: RuleRunner) -> None:
    """An unqualified symbol (`B.Foo`) resolvable via an enclosing package scope is inferred."""
    consumer_source = dedent(
        """\
        package org.pantsbuild.outer
        package intermediate
        object A {
          def main(args: Array[String]): Unit = {
            println(B.Foo)
          }
        }
        """
    )
    provider_source = dedent(
        """\
        package org.pantsbuild.outer
        object B {
          val Foo = 3
        }
        """
    )
    rule_runner.write_files(
        {
            "foo/BUILD": "scala_sources()",
            "foo/A.scala": consumer_source,
            "bar/BUILD": "scala_sources()",
            "bar/B.scala": provider_source,
        }
    )

    consumer = rule_runner.get_target(Address("foo", relative_file_path="A.scala"))
    inferred = rule_runner.request(
        InferredDependencies, [InferScalaSourceDependencies(consumer[ScalaSourceField])]
    )
    assert inferred == InferredDependencies([Address("bar", relative_file_path="B.scala")])
async def infer_python_init_dependencies(
    request: InferInitDependencies,
    python_infer_subsystem: PythonInferSubsystem) -> InferredDependencies:
    """Infer deps on the owners of all ancestor `__init__.py(i)` files of the source."""
    if not python_infer_subsystem.inits:
        return InferredDependencies([])

    fp = request.sources_field.file_path
    # file_path is always set for a single-file source field.
    assert fp is not None
    init_files = await Get(
        AncestorFiles,
        AncestorFilesRequest(input_files=(fp, ), requested=("__init__.py", "__init__.pyi")),
    )
    owners = await MultiGet(
        Get(Owners, OwnersRequest((f, ))) for f in init_files.snapshot.files)
    return InferredDependencies(itertools.chain.from_iterable(owners))
async def infer_terraform_module_dependencies(
    request: InferTerraformModuleDependenciesRequest,
) -> InferredDependencies:
    """Infer deps on other `terraform_module` targets referenced by this module's `.tf` files."""
    hydrated_sources = await Get(HydratedSources, HydrateSourcesRequest(request.sources_field))
    paths = OrderedSet(filename for filename in hydrated_sources.snapshot.files
                       if filename.endswith(".tf"))
    # The parser process prints one candidate module spec path per stdout line.
    result = await Get(
        ProcessResult,
        ParseTerraformModuleSources(
            sources_digest=hydrated_sources.snapshot.digest,
            paths=tuple(paths),
        ),
    )
    candidate_spec_paths = [
        line for line in result.stdout.decode("utf-8").split("\n") if line
    ]
    # For each path, see if there is a `terraform_module` target at the specified spec_path.
    candidate_targets = await Get(
        Targets,
        AddressSpecs([
            MaybeEmptySiblingAddresses(path) for path in candidate_spec_paths
        ]))
    # TODO: Need to either implement the standard ambiguous dependency logic or ban >1 terraform_module
    # per directory.
    terraform_module_addresses = [
        tgt.address for tgt in candidate_targets
        if tgt.has_field(TerraformModuleSourcesField)
    ]
    return InferredDependencies(terraform_module_addresses)
async def infer_terraform_module_dependencies(
    request: InferTerraformModuleDependenciesRequest,
) -> InferredDependencies:
    """Infer deps on other `terraform_module` targets referenced by this module's `.tf` files."""
    hydrated_sources = await Get(HydratedSources, HydrateSourcesRequest(request.sources_field))
    paths = OrderedSet(
        filename for filename in hydrated_sources.snapshot.files if filename.endswith(".tf")
    )
    # The parser process prints one candidate module spec path per stdout line.
    result = await Get(
        ProcessResult,
        ParseTerraformModuleSources(
            sources_digest=hydrated_sources.snapshot.digest,
            paths=tuple(paths),
        ),
    )
    candidate_spec_paths = [line for line in result.stdout.decode("utf-8").split("\n") if line]

    # For each path, see if there is a `terraform_module` target at the specified spec_path.
    candidate_targets = await Get(
        Targets,
        RawSpecs(
            dir_globs=tuple(DirGlobSpec(path) for path in candidate_spec_paths),
            # A referenced directory may have no targets at all; that is not an error.
            unmatched_glob_behavior=GlobMatchErrorBehavior.ignore,
            description_of_origin="the `terraform_module` dependency inference rule",
        ),
    )
    # TODO: Need to either implement the standard ambiguous dependency logic or ban >1 terraform_module
    # per directory.
    terraform_module_addresses = [
        tgt.address for tgt in candidate_targets if tgt.has_field(TerraformModuleSourcesField)
    ]
    return InferredDependencies(terraform_module_addresses)
def test_infer_java_imports_unnamed_package(rule_runner: RuleRunner) -> None:
    # A source file without a package declaration lives in the "unnamed package", but may still be
    # consumed (but not `import`ed) by other files in the unnamed package.
    rule_runner.write_files({
        "BUILD": dedent("""\
            java_sources(name = 'a')
            """),
        "Main.java": dedent("""\
            public class Main {
                public static void main(String[] args) throws Exception {
                    Lib l = new Lib();
                }
            }
            """),
        "Lib.java": dedent("""\
            public class Lib {}
            """),
    })

    main_tgt = rule_runner.get_target(
        Address("", target_name="a", relative_file_path="Main.java"))
    inferred = rule_runner.request(
        InferredDependencies, [InferJavaSourceDependencies(main_tgt[JavaSourceField])])
    expected = [Address("", target_name="a", relative_file_path="Lib.java")]
    assert inferred == InferredDependencies(dependencies=expected)
async def infer_java_dependencies_via_source_analysis(
    request: InferJavaSourceDependencies,
) -> InferredDependencies:
    """Delegate Java dependency inference to the combined deps-and-exports rule."""
    java_inferred = await Get(
        JavaInferredDependencies,
        JavaInferredDependenciesAndExportsRequest(request.sources_field),
    )
    # Only the dependency portion of the combined result is surfaced here.
    return InferredDependencies(dependencies=java_inferred.dependencies)
def test_infer_scala_imports_same_target(rule_runner: RuleRunner) -> None:
    """Files owned by the same target do not infer dependencies on each other."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                scala_sources(
                    name = 't',
                )
                """
            ),
            "A.scala": dedent(
                """\
                package org.pantsbuild.a
                object A {}
                """
            ),
            "B.scala": dedent(
                """\
                package org.pantsbuild.b
                object B {}
                """
            ),
        }
    )

    def infer(tgt):
        return rule_runner.request(
            InferredDependencies,
            [InferScalaSourceDependencies(tgt[ScalaSourceField])],
        )

    sibling_a = rule_runner.get_target(Address("", target_name="t", relative_file_path="A.scala"))
    sibling_b = rule_runner.get_target(Address("", target_name="t", relative_file_path="B.scala"))

    # Neither sibling infers the other.
    assert infer(sibling_a) == InferredDependencies(dependencies=[])
    assert infer(sibling_b) == InferredDependencies(dependencies=[])
async def infer_scala_dependencies_via_source_analysis(
    request: InferScalaSourceDependencies,
    scala_infer_subsystem: ScalaInferSubsystem,
    jvm: JvmSubsystem,
    symbol_mapping: SymbolMapping,
) -> InferredDependencies:
    """Infer Scala deps by matching imported/consumed symbols against the symbol map."""
    if not scala_infer_subsystem.imports:
        return InferredDependencies([])

    address = request.sources_field.address
    wrapped_tgt = await Get(
        WrappedTarget,
        WrappedTargetRequest(address, description_of_origin="<infallible>"))
    tgt = wrapped_tgt.target
    explicitly_provided_deps, analysis = await MultiGet(
        Get(ExplicitlyProvidedDependencies, DependenciesRequest(tgt[Dependencies])),
        Get(ScalaSourceDependencyAnalysis, SourceFilesRequest([request.sources_field])),
    )

    # Collect candidate symbols: imports always (guarded above), consumed types optionally.
    symbols: OrderedSet[str] = OrderedSet()
    if scala_infer_subsystem.imports:
        symbols.update(analysis.all_imports())
    if scala_infer_subsystem.consumed_types:
        symbols.update(analysis.fully_qualified_consumed_symbols())

    # Matches are restricted to the target's JVM resolve.
    resolve = tgt[JvmResolveField].normalized_value(jvm)

    dependencies: OrderedSet[Address] = OrderedSet()
    for symbol in symbols:
        for matches in symbol_mapping.addresses_for_symbol(symbol, resolve).values():
            explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
                matches,
                address,
                import_reference="type",
                context=f"The target {address} imports `{symbol}`",
            )
            maybe_disambiguated = explicitly_provided_deps.disambiguated(
                matches)
            if maybe_disambiguated:
                dependencies.add(maybe_disambiguated)

    return InferredDependencies(dependencies)
async def infer_protobuf_dependencies(
    request: InferProtobufDependencies, protobuf_mapping: ProtobufMapping, protoc: Protoc
) -> InferredDependencies:
    """Infer Protobuf deps by looking up each parsed import in the import-path mapping.

    Imports absent from the mapping are silently skipped (no ambiguity handling here).
    """
    if not protoc.dependency_inference:
        return InferredDependencies([], sibling_dependencies_inferrable=False)
    hydrated_sources = await Get(HydratedSources, HydrateSourcesRequest(request.sources_field))
    digest_contents = await Get(DigestContents, Digest, hydrated_sources.snapshot.digest)
    result = sorted(
        {
            protobuf_mapping[import_path]
            for file_content in digest_contents
            for import_path in parse_proto_imports(file_content.content.decode())
            if import_path in protobuf_mapping
        }
    )
    return InferredDependencies(result, sibling_dependencies_inferrable=True)
def test_infer_java_imports(rule_runner: RuleRunner) -> None:
    """A cross-target `import` is inferred; the imported target itself infers nothing."""
    rule_runner.write_files({
        "BUILD": dedent("""\
            java_sources(
                name = 'a',
            )
            """),
        "A.java": dedent("""\
            package org.pantsbuild.a;
            import org.pantsbuild.b.B;
            public class A {}
            """),
        "sub/BUILD": dedent("""\
            java_sources(
                name = 'b',
            )
            """),
        "sub/B.java": dedent("""\
            package org.pantsbuild.b;
            public class B {}
            """),
    })

    def infer(tgt):
        return rule_runner.request(
            InferredDependencies, [InferJavaSourceDependencies(tgt[JavaSourceField])])

    importer = rule_runner.get_target(
        Address("", target_name="a", relative_file_path="A.java"))
    importee = rule_runner.get_target(
        Address("sub", target_name="b", relative_file_path="B.java"))

    assert infer(importer) == InferredDependencies(dependencies=[importee.address])
    assert infer(importee) == InferredDependencies(dependencies=[])
async def infer_python_init_dependencies(
    request: InferInitDependencies,
    python_infer_subsystem: PythonInferSubsystem,
    python_setup: PythonSetup,
) -> InferredDependencies:
    """Infer deps on ancestor `__init__` files, honoring the `init_files` mode and resolves."""
    # Skip when the user explicitly disabled `inits`, or when the mode is `never`.
    if (
        not python_infer_subsystem.options.is_default("inits") and not python_infer_subsystem.inits
    ) or python_infer_subsystem.init_files is InitFilesInference.never:
        return InferredDependencies([])

    # In `content_only` mode (and with `inits` left at its default), empty files are skipped.
    ignore_empty_files = (
        python_infer_subsystem.options.is_default("inits")
        and python_infer_subsystem.init_files is InitFilesInference.content_only
    )
    fp = request.sources_field.file_path
    # file_path is always set for a single-file source field.
    assert fp is not None
    init_files = await Get(
        AncestorFiles,
        AncestorFilesRequest(
            input_files=(fp,),
            requested=("__init__.py", "__init__.pyi"),
            ignore_empty_files=ignore_empty_files,
        ),
    )
    owners = await MultiGet(Get(Owners, OwnersRequest((f,))) for f in init_files.snapshot.files)

    original_tgt, owner_tgts = await MultiGet(
        Get(
            WrappedTarget,
            WrappedTargetRequest(
                request.sources_field.address, description_of_origin="<infallible>"
            ),
        ),
        Get(Targets, Addresses(itertools.chain.from_iterable(owners))),
    )
    resolve = original_tgt.target[PythonResolveField].normalized_value(python_setup)
    # Only infer deps on __init__ owners that are Python sources in the same resolve.
    python_owners = [
        tgt.address
        for tgt in owner_tgts
        if (
            tgt.has_field(PythonSourceField)
            and tgt[PythonResolveField].normalized_value(python_setup) == resolve
        )
    ]
    return InferredDependencies(python_owners)
async def infer_smalltalk_dependencies(request: InferSmalltalkDependencies) -> InferredDependencies:
    """Demo inference rule: every line of every source file is parsed as an address."""
    # To demo an inference rule, we simply treat each `sources` file to contain a list of
    # addresses, one per line.
    hydrated_sources = await Get[HydratedSources](HydrateSourcesRequest(request.sources_field))
    file_contents = await Get[FilesContent](Digest, hydrated_sources.snapshot.digest)
    all_lines = itertools.chain.from_iterable(
        fc.content.decode().splitlines() for fc in file_contents
    )
    return InferredDependencies(Address.parse(line) for line in all_lines)
async def infer_go_dependencies(
    request: InferGoPackageDependenciesRequest,
    std_lib_imports: GoStdLibImports,
    package_mapping: ImportPathToPackages,
) -> InferredDependencies:
    """Infer Go package deps by mapping analyzed import paths to first-party packages."""
    addr = request.sources_field.address
    maybe_pkg_analysis = await Get(
        FallibleFirstPartyPkgAnalysis, FirstPartyPkgAnalysisRequest(addr)
    )
    if maybe_pkg_analysis.analysis is None:
        # Analysis failure is logged but does not fail the build: we simply infer nothing.
        logger.error(
            f"Failed to analyze {maybe_pkg_analysis.import_path} for dependency inference:\n"
            f"{maybe_pkg_analysis.stderr}"
        )
        return InferredDependencies([])
    pkg_analysis = maybe_pkg_analysis.analysis

    inferred_dependencies = []
    # Consider regular, test, and external-test imports alike.
    for import_path in (
        *pkg_analysis.imports,
        *pkg_analysis.test_imports,
        *pkg_analysis.xtest_imports,
    ):
        # Standard-library imports never map to first-party targets.
        if import_path in std_lib_imports:
            continue
        # Avoid a dependency cycle caused by external test imports of this package (i.e., "xtest").
        if import_path == pkg_analysis.import_path:
            continue
        candidate_packages = package_mapping.mapping.get(import_path, ())
        if len(candidate_packages) > 1:
            # TODO(#12761): Use ExplicitlyProvidedDependencies for disambiguation.
            logger.warning(
                f"Ambiguous mapping for import path {import_path} on packages at addresses: {candidate_packages}"
            )
        elif len(candidate_packages) == 1:
            inferred_dependencies.append(candidate_packages[0])
        else:
            logger.debug(
                f"Unable to infer dependency for import path '{import_path}' "
                f"in go_package at address '{addr}'."
            )

    return InferredDependencies(inferred_dependencies)
async def infer_python_conftest_dependencies(
    request: InferConftestDependencies,
    python_inference: PythonInference,
) -> InferredDependencies:
    """Infer deps on the owners of all ancestor `conftest.py` files of the sources."""
    if not python_inference.conftests:
        return InferredDependencies()

    # Locate conftest.py files not already in the Snapshot.
    hydrated_sources = await Get(HydratedSources, HydrateSourcesRequest(request.sources_field))
    extra_conftest_files = await Get(
        AncestorFiles,
        AncestorFilesRequest("conftest.py", hydrated_sources.snapshot),
    )

    # And add dependencies on their owners.
    # NB: Because conftest.py files effectively always have content, we require an owning target.
    owners = await MultiGet(
        Get(Owners, OwnersRequest((f,), OwnersNotFoundBehavior.error))
        for f in extra_conftest_files.snapshot.files
    )
    return InferredDependencies(itertools.chain.from_iterable(owners))
async def infer_python_conftest_dependencies(
    request: InferConftestDependencies,
    python_infer_subsystem: PythonInferSubsystem,
) -> InferredDependencies:
    """Infer deps on the owners of all ancestor `conftest.py` files of the source."""
    if not python_infer_subsystem.conftests:
        return InferredDependencies([])

    fp = request.sources_field.file_path
    # file_path is always set for a single-file source field.
    assert fp is not None
    conftest_files = await Get(
        AncestorFiles,
        AncestorFilesRequest(input_files=(fp, ), requested=("conftest.py", )),
    )
    owners = await MultiGet(
        # NB: Because conftest.py files effectively always have content, we require an
        # owning target.
        Get(Owners, OwnersRequest((f, ), OwnersNotFoundBehavior.error))
        for f in conftest_files.snapshot.files)
    return InferredDependencies(itertools.chain.from_iterable(owners))
def check(address: Address, expected: list[Address]) -> None:
    """Assert init-file inference for `address` matches `expected` (empty for `never` mode)."""
    tgt = rule_runner.get_target(address)
    inferred = rule_runner.request(
        InferredDependencies,
        [InferInitDependencies(tgt[PythonSourceField])],
    )
    if behavior == InitFilesInference.never:
        # In `never` mode nothing may be inferred, regardless of `expected`.
        assert not inferred
        return
    assert inferred == InferredDependencies(expected)
def test_infer_python_conftests() -> None:
    """Ancestor conftest.py files are inferred, matching the consuming target's resolve."""
    rule_runner = RuleRunner(
        rules=[
            *ancestor_files.rules(),
            *target_types_rules.rules(),
            *core_target_types_rules(),
            infer_python_conftest_dependencies,
            SubsystemRule(PythonInferSubsystem),
            QueryRule(InferredDependencies, (InferConftestDependencies,)),
        ],
        target_types=[PythonTestsGeneratorTarget, PythonTestUtilsGeneratorTarget],
        objects={"parametrize": Parametrize},
    )
    rule_runner.set_options(
        [
            "--source-root-patterns=src/python",
            "--python-resolves={'a': '', 'b': ''}",
            "--python-default-resolve=a",
            "--python-enable-resolves",
        ],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    rule_runner.write_files(
        {
            # The root conftest is parametrized over two resolves; the others use the default.
            "src/python/root/conftest.py": "",
            "src/python/root/BUILD": "python_test_utils(resolve=parametrize('a', 'b'))",
            "src/python/root/mid/conftest.py": "",
            "src/python/root/mid/BUILD": "python_test_utils()",
            "src/python/root/mid/leaf/conftest.py": "",
            "src/python/root/mid/leaf/this_is_a_test.py": "",
            "src/python/root/mid/leaf/BUILD": "python_test_utils()\npython_tests(name='tests')",
        }
    )

    def run_dep_inference(address: Address) -> InferredDependencies:
        # Helper: run conftest inference for the PythonSourceField of `address`.
        target = rule_runner.get_target(address)
        return rule_runner.request(
            InferredDependencies,
            [InferConftestDependencies(target[PythonSourceField])],
        )

    # All ancestor conftests are inferred; the parametrized root conftest resolves to the
    # test's own resolve ('a', the default).
    assert run_dep_inference(
        Address(
            "src/python/root/mid/leaf", target_name="tests", relative_file_path="this_is_a_test.py"
        )
    ) == InferredDependencies(
        [
            Address(
                "src/python/root", relative_file_path="conftest.py", parameters={"resolve": "a"}
            ),
            Address("src/python/root/mid", relative_file_path="conftest.py"),
            Address("src/python/root/mid/leaf", relative_file_path="conftest.py"),
        ],
    )
async def infer_shell_dependencies(
    request: InferShellDependencies, shell_mapping: ShellMapping,
    shell_setup: ShellSetup) -> InferredDependencies:
    """Infer deps for shell sources from parsed `source`/`.` statements.

    Unambiguous sourced paths map directly; ambiguous ones warn and are only added
    when the target's explicit `!`-ignores disambiguate them.
    """
    if not shell_setup.dependency_inference:
        return InferredDependencies([], sibling_dependencies_inferrable=False)

    address = request.sources_field.address
    wrapped_tgt = await Get(WrappedTarget, Address, address)
    explicitly_provided_deps, hydrated_sources = await MultiGet(
        Get(ExplicitlyProvidedDependencies, DependenciesRequest(wrapped_tgt.target[Dependencies])),
        Get(HydratedSources, HydrateSourcesRequest(request.sources_field)),
    )
    # Parse each file individually so results can be attributed per file.
    per_file_digests = await MultiGet(
        Get(Digest, DigestSubset(hydrated_sources.snapshot.digest, PathGlobs([f])))
        for f in hydrated_sources.snapshot.files)
    all_detected_imports = await MultiGet(
        Get(ParsedShellImports, ParseShellImportsRequest(digest, f))
        for digest, f in zip(per_file_digests, hydrated_sources.snapshot.files)
    )

    result: OrderedSet[Address] = OrderedSet()
    for detected_imports in all_detected_imports:
        for import_path in detected_imports:
            unambiguous = shell_mapping.mapping.get(import_path)
            ambiguous = shell_mapping.ambiguous_modules.get(import_path)
            if unambiguous:
                result.add(unambiguous)
            elif ambiguous:
                explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
                    ambiguous,
                    address,
                    import_reference="file",
                    context=f"The target {address} sources `{import_path}`",
                )
                maybe_disambiguated = explicitly_provided_deps.disambiguated_via_ignores(
                    ambiguous)
                if maybe_disambiguated:
                    result.add(maybe_disambiguated)
    return InferredDependencies(sorted(result), sibling_dependencies_inferrable=True)