async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Build a PexRequest from the transitive closure of the requested addresses.

    Gathers interpreter constraints from Python-source targets, third-party requirements
    from requirement fields, and (optionally) first-party source files plus resources.
    """
    closure = (await Get[TransitiveTargets](Addresses, request.addresses)).closure

    src_tgts = [tgt for tgt in closure if tgt.has_field(PythonSources)]
    req_fields = [
        tgt[PythonRequirementsField] for tgt in closure if tgt.has_field(PythonRequirementsField)
    ]
    # NB: PythonRequirementsFileSources is a subclass of FilesSources. We filter it out so that
    # requirements.txt is not included in the PEX and so that irrelevant changes to it (e.g.
    # whitespace changes) do not invalidate the PEX.
    rsrc_tgts = [
        tgt
        for tgt in closure
        if tgt.has_field(ResourcesSources)
        or (tgt.has_field(FilesSources) and not tgt.has_field(PythonRequirementsFileSources))
    ]

    constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (tgt.get(PythonInterpreterCompatibility) for tgt in src_tgts), python_setup
    )

    digests = []
    if request.additional_sources:
        digests.append(request.additional_sources)
    if request.include_source_files:
        prepared = await Get[ImportablePythonSources](Targets(src_tgts + rsrc_tgts))
        digests.append(prepared.snapshot.directory_digest)
    merged_sources = await Get[Digest](DirectoriesToMerge(directories=tuple(digests)))

    reqs = PexRequirements.create_from_requirement_fields(
        req_fields, additional_requirements=request.additional_requirements
    )
    return PexRequest(
        output_filename=request.output_filename,
        requirements=reqs,
        interpreter_constraints=constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_sources,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=request.description,
    )
async def get_requirements(
    dep_owner: DependencyOwner, union_membership: UnionMembership
) -> ExportedTargetRequirements:
    """Compute the 3rdparty requirement strings needed by the given exported target.

    Walks the exported target's transitive closure, partitions ownable targets into those
    owned by this exported target vs. by other exported targets, and collects:
      * direct 3rdparty requirements of the targets we own, and
      * the `provides` requirement of each other exported target we depend on.
    """
    transitive_targets = await Get(
        TransitiveTargets, Addresses([dep_owner.exported_target.target.address])
    )
    ownable_tgts = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    # Resolve the owning exported target for every ownable target, in parallel.
    owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_tgts)
    owned_by_us: Set[Target] = set()
    owned_by_others: Set[Target] = set()
    for tgt, owner in zip(ownable_tgts, owners):
        (owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)

    # Get all 3rdparty deps of our owned deps.
    #
    # Note that we need only consider requirements that are direct dependencies of our owned deps:
    # If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
    # if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
    # then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
    # will require ET.
    #
    # TODO: Note that this logic doesn't account for indirection via dep aggregator targets, of type
    # `target`. But we don't have those in v2 (yet) anyway. Plus, as we move towards buildgen and/or
    # stricter build graph hygiene, it makes sense to require that targets directly declare their
    # true dependencies. Plus, in the specific realm of setup-py, since we must exclude indirect
    # deps across exported target boundaries, it's not a big stretch to just insist that
    # requirements must be direct deps.
    direct_deps_tgts = await MultiGet(
        Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in owned_by_us
    )
    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in itertools.chain.from_iterable(direct_deps_tgts)
        if tgt.has_field(PythonRequirementsField)
    )
    req_strs = list(reqs)

    # Add the requirements on any exported targets on which we depend.
    exported_targets_we_depend_on = await MultiGet(
        Get(ExportedTarget, OwnedDependency(tgt)) for tgt in owned_by_others
    )
    # NOTE(review): iterating a set here means the appended requirement order is not
    # deterministic across runs — confirm ExportedTargetRequirements sorts or is order-insensitive.
    req_strs.extend(et.provides.requirement for et in set(exported_targets_we_depend_on))
    return ExportedTargetRequirements(req_strs)
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Assemble a PexRequest covering the transitive closure of the requested addresses.

    Interpreter constraints and third-party requirements are drawn from every target in the
    closure; first-party sources are merged in only when the request asks for them.
    """
    closure = (await Get[TransitiveTargets](Addresses, request.addresses)).closure

    digests = []
    if request.additional_sources:
        digests.append(request.additional_sources)
    if request.include_source_files:
        prepared = await Get[ImportablePythonSources](Targets(closure))
        digests.append(prepared.snapshot.digest)
    sources_digest = await Get[Digest](MergeDigests(digests))

    constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in closure
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )
    reqs = PexRequirements.create_from_requirement_fields(
        (tgt[PythonRequirementsField] for tgt in closure if tgt.has_field(PythonRequirementsField)),
        additional_requirements=request.additional_requirements,
    )

    return PexRequest(
        output_filename=request.output_filename,
        requirements=reqs,
        interpreter_constraints=constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=sources_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=request.description,
    )
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Build a PexRequest from the transitive closure, optionally widening to a constraints file.

    When `[python-setup].requirement_constraints` is set and `resolve_all_constraints` applies
    (ALWAYS, or NONDEPLOYABLES for an internal-only PEX), the PEX resolves the *entire*
    constraints file rather than just the exact requirements, for better resolve caching —
    unless some exact requirement is missing from the constraints file, in which case we fall
    back to the exact requirements with a warning.
    """
    transitive_targets = await Get(TransitiveTargets, Addresses, request.addresses)
    all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    description = request.description

    if python_setup.requirement_constraints:
        # In requirement strings Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name) for req in exact_reqs
        }
        constraints_file_contents = await Get(
            DigestContents,
            PathGlobs(
                [python_setup.requirement_constraints],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin="the option `--python-setup-requirement-constraints`",
            ),
        )
        constraints_file_reqs = set(
            parse_requirements(next(iter(constraints_file_contents)).content.decode())
        )
        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        # Projects we require that the constraints file does not pin.
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            logger.warning(
                f"The constraints file {python_setup.requirement_constraints} does not contain "
                f"entries for the following requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
            python_setup.resolve_all_constraints == ResolveAllConstraintsOption.NONDEPLOYABLES
            and request.internal_only
        ):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring resolve_all_constraints setting in [python_setup] scope "
                    "because constraints file does not cover all requirements."
                )
            else:
                # Resolve the whole constraints file so the resolve is shared across targets.
                requirements = PexRequirements(str(req) for req in constraints_file_reqs)
                description = description or f"Resolving {python_setup.requirement_constraints}"
    elif (
        python_setup.resolve_all_constraints != ResolveAllConstraintsOption.NEVER
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        # resolve_all_constraints was explicitly enabled, but there is no constraints file to
        # resolve from — surface the misconfiguration instead of silently ignoring it.
        raise ValueError(
            f"[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            f"[python-setup].requirement_constraints must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=description,
    )
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Build a PexRequest from the transitive closure, optionally widening to a constraints file.

    When `[python-setup].requirement_constraints` is set and `resolve_all_constraints` is
    enabled, the PEX resolves the *entire* constraints file rather than just the exact
    requirements — unless some exact requirement is missing from the constraints file, in
    which case we fall back to the exact requirements with a warning.
    """
    transitive_targets = await Get(TransitiveTargets, Addresses, request.addresses)
    all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs

    if python_setup.requirement_constraints:
        # NOTE(review): project names are compared without PEP 503 canonicalization here, so
        # e.g. `Foo_Bar` and `foo-bar` are treated as different projects — confirm whether
        # canonicalize_project_name should be applied on both sides.
        exact_req_projects = {Requirement.parse(req).project_name for req in exact_reqs}
        constraints_file_contents = await Get(
            DigestContents,
            PathGlobs(
                [python_setup.requirement_constraints],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin="the option `--python-setup-requirement-constraints`",
            ),
        )
        constraints_file_reqs = set(
            parse_requirements(next(iter(constraints_file_contents)).content.decode())
        )
        constraint_file_projects = {req.project_name for req in constraints_file_reqs}
        # Projects we require that the constraints file does not pin.
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            logger.warning(
                f"The constraints file {python_setup.requirement_constraints} does not contain "
                f"entries for the following requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints:
            if unconstrained_projects:
                # Fix: the original implicit string concatenation rendered as
                # "...scopeBecause constraints file..." (missing space and stray capital).
                logger.warning(
                    "Ignoring resolve_all_constraints setting in [python_setup] scope "
                    "because constraints file does not cover all requirements."
                )
            else:
                # Resolve the whole constraints file so the resolve is shared across targets.
                requirements = PexRequirements(str(req) for req in constraints_file_reqs)
    elif python_setup.resolve_all_constraints:
        # resolve_all_constraints is enabled but there is no constraints file to resolve from —
        # surface the misconfiguration instead of silently ignoring it.
        raise ValueError(
            "resolve_all_constraints in the [python-setup] scope is set, so "
            "requirement_constraints in [python-setup] must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=request.description,
    )
async def pylint_lint_partition(partition: PylintPartition, pylint: Pylint) -> LintResult:
    """Run Pylint over one interpreter-constraint partition of targets.

    Builds three PEXes (Pylint + plugins, the targets' third-party requirements, and a thin
    runner joining them via `--pex-path`), stages config, plugin sources, and first-party
    sources into one digest, and executes Pylint over the partition's field sets.
    """
    # We build one PEX with Pylint requirements and another with all direct 3rd-party dependencies.
    # Splitting this into two PEXes gives us finer-grained caching. We then merge via `--pex-path`.
    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField)
    )
    target_requirements = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in partition.targets_with_dependencies
        if tgt.has_field(PythonRequirementsField)
    )
    pylint_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            requirements=PexRequirements([*pylint.all_requirements, *plugin_requirements]),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="requirements.pex",
            requirements=target_requirements,
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    # TODO(John Sirois): Support shading python binaries:
    # https://github.com/pantsbuild/pants/issues/9206
    # Right now any Pylint transitive requirements will shadow corresponding user
    # requirements, which could lead to problems.
    pylint_runner_pex_args = ["--pex-path", ":".join(["pylint.pex", "requirements.pex"])]
    pylint_runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint_runner.pex",
            entry_point=pylint.entry_point,
            interpreter_constraints=partition.interpreter_constraints,
            additional_args=pylint_runner_pex_args,
        ),
    )
    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=[pylint.config] if pylint.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        ),
    )
    prepare_plugin_sources_request = Get(
        StrippedPythonSourceFiles,
        PythonSourceFilesRequest(partition.plugin_targets),
    )
    prepare_python_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(partition.targets_with_dependencies),
    )
    field_set_sources_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in partition.field_sets),
    )
    # Await all the independent requests concurrently.
    (
        pylint_pex,
        requirements_pex,
        pylint_runner_pex,
        config_digest,
        prepared_plugin_sources,
        prepared_python_sources,
        field_set_sources,
    ) = await MultiGet(
        pylint_pex_request,
        requirements_pex_request,
        pylint_runner_pex_request,
        config_digest_request,
        prepare_plugin_sources_request,
        prepare_python_sources_request,
        field_set_sources_request,
    )
    # Plugin sources are staged under `__plugins` so they can be put on PEX_EXTRA_SYS_PATH below.
    prefixed_plugin_sources = (
        await Get(
            Digest,
            AddPrefix(prepared_plugin_sources.stripped_source_files.snapshot.digest, "__plugins"),
        )
        if pylint.source_plugins
        else EMPTY_DIGEST
    )
    pythonpath = list(prepared_python_sources.source_roots)
    if pylint.source_plugins:
        # NB: Pylint source plugins must be explicitly loaded via PEX_EXTRA_SYS_PATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        pythonpath.append("__plugins")
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                pylint_pex.digest,
                requirements_pex.digest,
                pylint_runner_pex.digest,
                config_digest,
                prefixed_plugin_sources,
                prepared_python_sources.source_files.snapshot.digest,
            )
        ),
    )
    address_references = ", ".join(
        sorted(field_set.address.spec for field_set in partition.field_sets)
    )
    result = await Get(
        FallibleProcessResult,
        PexProcess(
            pylint_runner_pex,
            argv=generate_args(source_files=field_set_sources, pylint=pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=(
                f"Run Pylint on {pluralize(len(partition.field_sets), 'target')}: "
                f"{address_references}."
            ),
        ),
    )
    return LintResult.from_fallible_process_result(result, linter_name="Pylint")
async def pylint_lint_partition(
    partition: PylintPartition,
    pylint: Pylint,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Pylint over one interpreter-constraint partition of targets.

    Builds three PEXes (Pylint + plugins, the targets' third-party requirements, and a runner
    joining them via `--pex-path`), stages config, plugin sources, and first-party sources into
    one digest, and executes Pylint over the partition's field sets.
    """
    # We build one PEX with Pylint requirements and another with all direct 3rd-party dependencies.
    # Splitting this into two PEXes gives us finer-grained caching. We then merge via `--pex-path`.
    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField)
    )
    target_requirements = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in partition.targets_with_dependencies
        if tgt.has_field(PythonRequirementsField)
    )
    pylint_pex_request = Get[Pex](
        PexRequest(
            output_filename="pylint.pex",
            requirements=PexRequirements([*pylint.get_requirement_specs(), *plugin_requirements]),
            interpreter_constraints=partition.interpreter_constraints,
            entry_point=pylint.get_entry_point(),
        )
    )
    requirements_pex_request = Get[Pex](
        PexRequest(
            output_filename="requirements.pex",
            requirements=target_requirements,
            interpreter_constraints=partition.interpreter_constraints,
        )
    )
    # TODO(John Sirois): Support shading python binaries:
    # https://github.com/pantsbuild/pants/issues/9206
    # Right now any Pylint transitive requirements will shadow corresponding user
    # requirements, which could lead to problems.
    pylint_runner_pex_args = ["--pex-path", ":".join(["pylint.pex", "requirements.pex"])]
    if pylint.source_plugins:
        # NB: See below for why we set PYTHONPATH to load source plugins. This setting is necessary
        # for PEX to pick up the PYTHONPATH value.
        pylint_runner_pex_args.append("--inherit-path=fallback")
    pylint_runner_pex_request = Get[Pex](
        PexRequest(
            output_filename="pylint_runner.pex",
            entry_point=pylint.get_entry_point(),
            interpreter_constraints=partition.interpreter_constraints,
            additional_args=pylint_runner_pex_args,
        )
    )
    config_snapshot_request = Get[Snapshot](
        PathGlobs(
            globs=[pylint.config] if pylint.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        )
    )
    prepare_plugin_sources_request = Get[ImportablePythonSources](
        Targets, partition.plugin_targets
    )
    prepare_python_sources_request = Get[ImportablePythonSources](
        Targets, partition.targets_with_dependencies
    )
    specified_source_files_request = Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            ((field_set.sources, field_set.origin) for field_set in partition.field_sets),
            strip_source_roots=True,
        )
    )
    # Await all the independent requests concurrently.
    (
        pylint_pex,
        requirements_pex,
        pylint_runner_pex,
        config_snapshot,
        prepared_plugin_sources,
        prepared_python_sources,
        specified_source_files,
    ) = await MultiGet(
        pylint_pex_request,
        requirements_pex_request,
        pylint_runner_pex_request,
        config_snapshot_request,
        prepare_plugin_sources_request,
        prepare_python_sources_request,
        specified_source_files_request,
    )
    # Plugin sources are staged under `__plugins` so they can be put on PYTHONPATH below.
    prefixed_plugin_sources = (
        await Get[Digest](AddPrefix(prepared_plugin_sources.snapshot.digest, "__plugins"))
        if pylint.source_plugins
        else EMPTY_DIGEST
    )
    input_digest = await Get[Digest](
        MergeDigests(
            (
                pylint_pex.digest,
                requirements_pex.digest,
                pylint_runner_pex.digest,
                config_snapshot.digest,
                prefixed_plugin_sources,
                prepared_python_sources.snapshot.digest,
            )
        ),
    )
    address_references = ", ".join(
        sorted(field_set.address.reference() for field_set in partition.field_sets)
    )
    process = pylint_runner_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./pylint_runner.pex",
        # NB: Pylint source plugins must be explicitly loaded via PYTHONPATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        env={"PYTHONPATH": "./__plugins"} if pylint.source_plugins else None,
        pex_args=generate_args(specified_source_files=specified_source_files, pylint=pylint),
        input_digest=input_digest,
        description=(
            f"Run Pylint on {pluralize(len(partition.field_sets), 'target')}: {address_references}."
        ),
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result, linter_name="Pylint")
async def pylint_lint(
    field_sets: PylintFieldSets,
    pylint: Pylint,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Pylint over the given field sets.

    Builds three PEXes (Pylint itself, the targets' third-party requirements, and a runner
    joining them via `--pex-path`), stages config and first-party sources into one digest, and
    executes Pylint. Returns a no-op result when `--pylint-skip` is set.
    """
    if pylint.skip:
        return LintResult.noop()

    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    addresses_with_dependencies = []
    for field_set in field_sets:
        addresses_with_dependencies.append(field_set.address)
        addresses_with_dependencies.extend(field_set.dependencies.value or ())
    targets = await Get[Targets](Addresses(addresses_with_dependencies))

    # NB: Pylint output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://pylint.pycqa.org/en/latest/faq.html#what-versions-of-python-is-pylint-supporting.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (field_set.compatibility for field_set in field_sets), python_setup
    )

    # We build one PEX with Pylint requirements and another with all direct 3rd-party dependencies.
    # Splitting this into two PEXes gives us finer-grained caching. We then merge via `--pex-path`.
    pylint_pex_request = Get[Pex](
        PexRequest(
            output_filename="pylint.pex",
            requirements=PexRequirements(pylint.get_requirement_specs()),
            interpreter_constraints=interpreter_constraints,
            entry_point=pylint.get_entry_point(),
        )
    )
    requirements_pex_request = Get[Pex](
        PexRequest(
            output_filename="requirements.pex",
            requirements=PexRequirements.create_from_requirement_fields(
                tgt[PythonRequirementsField]
                for tgt in targets
                if tgt.has_field(PythonRequirementsField)
            ),
            interpreter_constraints=interpreter_constraints,
        )
    )
    pylint_runner_pex_request = Get[Pex](
        PexRequest(
            output_filename="pylint_runner.pex",
            entry_point=pylint.get_entry_point(),
            interpreter_constraints=interpreter_constraints,
            additional_args=(
                "--pex-path",
                # TODO(John Sirois): Support shading python binaries:
                # https://github.com/pantsbuild/pants/issues/9206
                # Right now any Pylint transitive requirements will shadow corresponding user
                # requirements which could lead to problems.
                ":".join(["pylint.pex", "requirements.pex"]),
            ),
        )
    )
    config_snapshot_request = Get[Snapshot](
        PathGlobs(
            globs=[pylint.config] if pylint.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        )
    )
    prepare_python_sources_request = Get[ImportablePythonSources](Targets, targets)
    specified_source_files_request = Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            ((field_set.sources, field_set.origin) for field_set in field_sets),
            strip_source_roots=True,
        )
    )
    # Await all the independent requests concurrently.
    (
        pylint_pex,
        requirements_pex,
        pylint_runner_pex,
        config_snapshot,
        prepared_python_sources,
        specified_source_files,
    ) = cast(
        Tuple[Pex, Pex, Pex, Snapshot, ImportablePythonSources, SourceFiles],
        await MultiGet(
            [
                pylint_pex_request,
                requirements_pex_request,
                pylint_runner_pex_request,
                config_snapshot_request,
                prepare_python_sources_request,
                specified_source_files_request,
            ]
        ),
    )
    input_digest = await Get[Digest](
        MergeDigests(
            (
                pylint_pex.digest,
                requirements_pex.digest,
                pylint_runner_pex.digest,
                config_snapshot.digest,
                prepared_python_sources.snapshot.digest,
            )
        ),
    )
    address_references = ", ".join(
        sorted(field_set.address.reference() for field_set in field_sets)
    )
    # NOTE(review): the process is created from requirements_pex while pex_path points at the
    # runner PEX — presumably intentional (any PEX can execute another via pex_path); confirm.
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        # Fix: dropped the extraneous `f` prefix — the original was an f-string with no
        # placeholders (ruff F541); the runtime value is byte-identical.
        pex_path="./pylint_runner.pex",
        pex_args=generate_args(specified_source_files=specified_source_files, pylint=pylint),
        input_digest=input_digest,
        description=f"Run Pylint on {pluralize(len(field_sets), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result, linter_name="Pylint")