async def setup(setup_request: SetupRequest, isort: Isort) -> Setup:
    """Prepare the isort `Process` for the files in the request.

    Builds the isort PEX, loads any configured config files, and gathers the
    source files concurrently, then merges everything into one input digest.
    """
    pex_get = Get(
        Pex,
        PexRequest(
            output_filename="isort.pex",
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(isort.interpreter_constraints),
            entry_point=isort.entry_point,
        ),
    )
    config_get = Get(
        Digest,
        PathGlobs(
            globs=isort.config,
            # Error eagerly so a bad `--isort-config` value surfaces to the user.
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            conjunction=GlobExpansionConjunction.all_match,
            description_of_origin="the option `--isort-config`",
        ),
    )
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in setup_request.request.field_sets),
    )
    source_files, requirements_pex, config_digest = await MultiGet(
        sources_get, pex_get, config_get
    )
    # When an earlier formatter already ran, operate on its output so fixes compose.
    if setup_request.request.prior_formatter_result is None:
        source_files_snapshot = source_files.snapshot
    else:
        source_files_snapshot = setup_request.request.prior_formatter_result
    input_digest = await Get(
        Digest,
        MergeDigests((source_files_snapshot.digest, requirements_pex.digest, config_digest)),
    )
    address_references = ", ".join(
        sorted(field_set.address.spec for field_set in setup_request.request.field_sets)
    )
    process = await Get(
        Process,
        PexProcess(
            requirements_pex,
            argv=generate_args(
                source_files=source_files, isort=isort, check_only=setup_request.check_only
            ),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=(
                f"Run isort on {pluralize(len(setup_request.request.field_sets), 'target')}: "
                f"{address_references}."
            ),
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
async def flake8_lint_partition(
    partition: Flake8Partition,
    flake8: Flake8,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Flake8 over a single interpreter-constraint partition of field sets."""
    pex_get = Get[Pex](
        PexRequest(
            output_filename="flake8.pex",
            requirements=PexRequirements(flake8.get_requirement_specs()),
            # Fall back to Flake8's default constraints when the partition has none.
            interpreter_constraints=(
                partition.interpreter_constraints
                or PexInterpreterConstraints(flake8.default_interpreter_constraints)
            ),
            entry_point=flake8.get_entry_point(),
        )
    )
    config_path: Optional[str] = flake8.options.config
    config_get = Get[Snapshot](
        PathGlobs(
            globs=[config_path] if config_path else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--flake8-config`",
        )
    )
    all_sources_get = Get[SourceFiles](
        AllSourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )
    specified_sources_get = Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (field_set.sources, field_set.origin) for field_set in partition.field_sets
        )
    )
    requirements_pex, config_snapshot, all_source_files, specified_source_files = cast(
        Tuple[Pex, Snapshot, SourceFiles, SourceFiles],
        await MultiGet([pex_get, config_get, all_sources_get, specified_sources_get]),
    )
    input_digest = await Get[Digest](
        MergeDigests(
            (all_source_files.snapshot.digest, requirements_pex.digest, config_snapshot.digest)
        )
    )
    address_references = ", ".join(
        sorted(field_set.address.reference() for field_set in partition.field_sets)
    )
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./flake8.pex",
        pex_args=generate_args(specified_source_files=specified_source_files, flake8=flake8),
        input_digest=input_digest,
        description=(
            f"Run Flake8 on {pluralize(len(partition.field_sets), 'target')}: {address_references}."
        ),
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result, linter_name="Flake8")
async def setup(
    request: SetupRequest,
    isort: Isort,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    """Assemble the execute request that runs isort over the request's targets."""
    adaptors_with_origins = request.formatter.adaptors_with_origins
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="isort.pex",
            requirements=PexRequirements(isort.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                isort.default_interpreter_constraints
            ),
            entry_point=isort.get_entry_point(),
        )
    )
    config_path: Optional[List[str]] = isort.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=config_path or (),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            # Every configured glob must match so a typo'd path is reported.
            conjunction=GlobExpansionConjunction.all_match,
            description_of_origin="the option `--isort-config`",
        )
    )
    # When an earlier formatter already ran, operate on its output so fixes compose.
    if request.formatter.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](
            LegacyAllSourceFilesRequest(
                adaptor_with_origin.adaptor for adaptor_with_origin in adaptors_with_origins
            )
        )
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.formatter.prior_formatter_result
    specified_source_files = await Get[SourceFiles](
        LegacySpecifiedSourceFilesRequest(adaptors_with_origins)
    )
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(
                all_source_files_snapshot.directory_digest,
                requirements_pex.directory_digest,
                config_snapshot.directory_digest,
            )
        )
    )
    address_references = ", ".join(
        sorted(
            adaptor_with_origin.adaptor.address.reference()
            for adaptor_with_origin in adaptors_with_origins
        )
    )
    process_request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./isort.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            isort=isort,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=f"Run isort for {address_references}",
    )
    return Setup(process_request)
async def setup(
    setup_request: SetupRequest,
    docformatter: Docformatter,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    """Prepare the process that runs Docformatter over the request's targets.

    Bug fix: the prior-formatter-result selection used truthiness while the
    decision to request all source files used `is None`. A prior result that
    was falsy but not None (e.g. an empty snapshot) left `rest` empty and made
    `rest[0]` raise IndexError. Both checks now consistently use `is None`.
    """
    requirements_pex_request = Get[Pex](
        PexRequest(
            output_filename="docformatter.pex",
            requirements=PexRequirements(docformatter.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                docformatter.default_interpreter_constraints
            ),
            entry_point=docformatter.get_entry_point(),
        )
    )
    all_source_files_request = Get[SourceFiles](
        AllSourceFilesRequest(
            field_set.sources for field_set in setup_request.request.field_sets
        )
    )
    specified_source_files_request = Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (field_set.sources, field_set.origin)
            for field_set in setup_request.request.field_sets
        )
    )
    requests: List[Get] = [requirements_pex_request, specified_source_files_request]
    # Only fetch all source files when no earlier formatter produced a result.
    if setup_request.request.prior_formatter_result is None:
        requests.append(all_source_files_request)
    requirements_pex, specified_source_files, *rest = cast(
        Union[Tuple[Pex, SourceFiles], Tuple[Pex, SourceFiles, SourceFiles]],
        await MultiGet(requests),
    )
    all_source_files_snapshot = (
        setup_request.request.prior_formatter_result
        if setup_request.request.prior_formatter_result is not None
        else rest[0].snapshot
    )
    input_digest = await Get[Digest](
        MergeDigests((all_source_files_snapshot.digest, requirements_pex.digest))
    )
    address_references = ", ".join(
        sorted(
            field_set.address.reference() for field_set in setup_request.request.field_sets
        )
    )
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./docformatter.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            docformatter=docformatter,
            check_only=setup_request.check_only,
        ),
        input_digest=input_digest,
        output_files=all_source_files_snapshot.files,
        description=(
            f"Run Docformatter on {pluralize(len(setup_request.request.field_sets), 'target')}: "
            f"{address_references}."
        ),
    )
    return Setup(process, original_digest=all_source_files_snapshot.digest)
async def setup(
    request: SetupRequest,
    black: Black,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    """Assemble the process that runs Black over the request's configurations."""
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="black.pex",
            requirements=PexRequirements(black.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                black.default_interpreter_constraints
            ),
            entry_point=black.get_entry_point(),
        )
    )
    config_path: Optional[str] = black.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=tuple([config_path] if config_path else []),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--black-config`",
        )
    )
    # When an earlier formatter already ran, operate on its output so fixes compose.
    if request.configs.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](
            AllSourceFilesRequest(config.sources for config in request.configs)
        )
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.configs.prior_formatter_result
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (config.sources, config.origin) for config in request.configs
        )
    )
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(
                all_source_files_snapshot.directory_digest,
                requirements_pex.directory_digest,
                config_snapshot.directory_digest,
            )
        )
    )
    address_references = ", ".join(
        sorted(config.address.reference() for config in request.configs)
    )
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./black.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            black=black,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=(
            f"Run Black on {pluralize(len(request.configs), 'target')}: {address_references}."
        ),
    )
    return Setup(process)
async def bandit_lint(
    configs: BanditConfigurations,
    bandit: Bandit,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Bandit over the given configurations and report a `LintResult`.

    Fix: `pex_path` was an f-string with no placeholders (lint F541); it is now
    a plain string literal, matching the other lint rules in this file.
    """
    if bandit.options.skip:
        return LintResult.noop()
    # NB: Bandit output depends upon which Python interpreter version it's run with. See
    # https://github.com/PyCQA/bandit#under-which-version-of-python-should-i-install-bandit.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (config.compatibility for config in configs), python_setup=python_setup
    )
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="bandit.pex",
            requirements=PexRequirements(bandit.get_requirement_specs()),
            interpreter_constraints=interpreter_constraints,
            entry_point=bandit.get_entry_point(),
        )
    )
    config_path: Optional[str] = bandit.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=tuple([config_path] if config_path else []),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--bandit-config`",
        )
    )
    all_source_files = await Get[SourceFiles](
        AllSourceFilesRequest(config.sources for config in configs)
    )
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest((config.sources, config.origin) for config in configs)
    )
    merged_input_files = await Get[Digest](
        MergeDigests(
            (all_source_files.snapshot.digest, requirements_pex.digest, config_snapshot.digest)
        ),
    )
    address_references = ", ".join(sorted(config.address.reference() for config in configs))
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./bandit.pex",  # was f"./bandit.pex": no placeholders, so plain literal
        pex_args=generate_args(specified_source_files=specified_source_files, bandit=bandit),
        input_files=merged_input_files,
        description=f"Run Bandit on {pluralize(len(configs), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result)
async def setup_lambdex(lambdex: Lambdex) -> LambdexSetup:
    """Build the lambdex.pex tool used to convert code into AWS Lambda functions."""
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="lambdex.pex",
            requirements=PexRequirements(lambdex.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                lambdex.default_interpreter_constraints
            ),
            entry_point=lambdex.get_entry_point(),
        )
    )
    return LambdexSetup(requirements_pex=requirements_pex)
async def setup_black(setup_request: SetupRequest, black: Black) -> Setup:
    """Prepare the Black `Process` for the files in the request."""
    pex_get = Get(
        Pex,
        PexRequest(
            output_filename="black.pex",
            internal_only=True,
            requirements=PexRequirements(black.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(black.interpreter_constraints),
            entry_point=black.entry_point,
        ),
    )
    config_get = Get(
        Digest,
        PathGlobs(
            globs=[black.config] if black.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--black-config`",
        ),
    )
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in setup_request.request.field_sets),
    )
    source_files, requirements_pex, config_digest = await MultiGet(
        sources_get, pex_get, config_get
    )
    # When an earlier formatter already ran, operate on its output so fixes compose.
    if setup_request.request.prior_formatter_result is None:
        source_files_snapshot = source_files.snapshot
    else:
        source_files_snapshot = setup_request.request.prior_formatter_result
    input_digest = await Get(
        Digest,
        MergeDigests((source_files_snapshot.digest, requirements_pex.digest, config_digest)),
    )
    process = await Get(
        Process,
        PexProcess(
            requirements_pex,
            argv=generate_args(
                source_files=source_files, black=black, check_only=setup_request.check_only
            ),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run Black on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
async def setup(
    request: SetupRequest,
    isort: Isort,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    """Assemble the process that runs isort over the request's configurations."""
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="isort.pex",
            requirements=PexRequirements(isort.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                isort.default_interpreter_constraints
            ),
            entry_point=isort.get_entry_point(),
        )
    )
    config_path: Optional[List[str]] = isort.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=config_path or (),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            # Every configured glob must match so a typo'd path is reported.
            conjunction=GlobExpansionConjunction.all_match,
            description_of_origin="the option `--isort-config`",
        )
    )
    # When an earlier formatter already ran, operate on its output so fixes compose.
    if request.configs.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](
            AllSourceFilesRequest(config.sources for config in request.configs)
        )
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.configs.prior_formatter_result
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (config.sources, config.origin) for config in request.configs
        )
    )
    merged_input_files = await Get[Digest](
        MergeDigests(
            (all_source_files_snapshot.digest, requirements_pex.digest, config_snapshot.digest)
        ),
    )
    address_references = ", ".join(
        sorted(config.address.reference() for config in request.configs)
    )
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./isort.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            isort=isort,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=(
            f"Run isort on {pluralize(len(request.configs), 'target')}: {address_references}."
        ),
    )
    return Setup(process, original_digest=all_source_files_snapshot.digest)
async def setup_setuptools(setuptools: Setuptools) -> SetuptoolsSetup:
    """Build the setuptools PEX.

    Note that this pex has no entrypoint. We use it to run our generated
    setup.py, which in turn imports from and invokes setuptools.
    """
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="setuptools.pex",
            requirements=PexRequirements(setuptools.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                setuptools.default_interpreter_constraints
            ),
        )
    )
    return SetuptoolsSetup(requirements_pex=requirements_pex)
async def flake8_lint(
    configs: Flake8Configurations,
    flake8: Flake8,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Flake8 over the given configurations and report a `LintResult`.

    Fix: `pex_path` was an f-string with no placeholders (lint F541); it is now
    a plain string literal, matching the other lint rules in this file.
    """
    if flake8.options.skip:
        return LintResult.noop()
    # NB: Flake8 output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://flake8.pycqa.org/en/latest/user/invocation.html.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (config.compatibility for config in configs), python_setup
    )
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="flake8.pex",
            requirements=PexRequirements(flake8.get_requirement_specs()),
            interpreter_constraints=interpreter_constraints,
            entry_point=flake8.get_entry_point(),
        )
    )
    config_path: Optional[str] = flake8.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=tuple([config_path] if config_path else []),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--flake8-config`",
        )
    )
    all_source_files = await Get[SourceFiles](
        AllSourceFilesRequest(config.sources for config in configs)
    )
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (config.sources, config.origin) for config in configs
        )
    )
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(
                all_source_files.snapshot.directory_digest,
                requirements_pex.directory_digest,
                config_snapshot.directory_digest,
            )
        ),
    )
    address_references = ", ".join(
        sorted(config.address.reference() for config in configs)
    )
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./flake8.pex",  # was f"./flake8.pex": no placeholders, so plain literal
        pex_args=generate_args(specified_source_files=specified_source_files, flake8=flake8),
        input_files=merged_input_files,
        description=f"Run Flake8 on {pluralize(len(configs), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result)
def _create_pex(self) -> Pex:
    """Build the setuptools/wheel PEX used to drive generated setup.py files."""
    pex_request = PexRequest(
        output_filename="setup-py-runner.pex",
        requirements=PexRequirements(["setuptools==44.0.0", "wheel==0.34.2"]),
    )
    bootstrapper = create_options_bootstrapper(
        args=["--backend-packages=pants.backend.python"]
    )
    return self.request_single_product(Pex, Params(pex_request, bootstrapper))
async def setup_coverage(coverage: CoverageSubsystem) -> CoverageSetup:
    """Build the internal coverage.pex used to drive coverage collection."""
    pex = await Get(
        Pex,
        PexRequest(
            output_filename="coverage.pex",
            internal_only=True,
            requirements=PexRequirements(coverage.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(coverage.interpreter_constraints),
            entry_point=coverage.entry_point,
        ),
    )
    return CoverageSetup(pex)
async def setup_lambdex(lambdex: Lambdex) -> LambdexSetup:
    """Build the lambdex.pex tool used to convert code into AWS Lambda functions."""
    requirements_pex = await Get(
        Pex,
        PexRequest(
            output_filename="lambdex.pex",
            requirements=PexRequirements(lambdex.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(lambdex.interpreter_constraints),
            entry_point=lambdex.entry_point,
        ),
    )
    return LambdexSetup(requirements_pex=requirements_pex)
async def create_ipython_repl_request(
    repl: IPythonRepl, ipython: IPython, pex_env: PexEnvironment
) -> ReplRequest:
    """Build the `ReplRequest` that launches IPython with the targets' code loaded."""
    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in ipython_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            Addresses(tgt.address for tgt in repl.targets), internal_only=True
        ),
    )
    requirements_get = Get(Pex, PexRequest, requirements_pex_request)
    sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(repl.targets, include_files=True)
    )
    ipython_get = Get(
        Pex,
        PexRequest(
            output_filename="ipython.pex",
            entry_point=ipython.entry_point,
            requirements=PexRequirements(ipython.all_requirements),
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            internal_only=True,
        ),
    )
    requirements_pex, sources, ipython_pex = await MultiGet(
        requirements_get, sources_get, ipython_get
    )
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (requirements_pex.digest, sources.source_files.snapshot.digest, ipython_pex.digest)
        ),
    )

    args = [repl.in_chroot(ipython_pex.name)]
    if ipython.options.ignore_cwd:
        args.append("--ignore-cwd")

    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict,
        "PEX_PATH": repl.in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }
    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
async def setup_coverage(coverage: PytestCoverage) -> CoverageSetup:
    """Build the coverage PEX, bundling our coverage plugin as its sources."""
    plugin_file_digest = await Get[Digest](InputFilesContent, COVERAGE_PLUGIN_INPUT)
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="coverage.pex",
            requirements=PexRequirements(coverage.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                coverage.default_interpreter_constraints
            ),
            entry_point=coverage.get_entry_point(),
            sources=plugin_file_digest,
        )
    )
    return CoverageSetup(requirements_pex)
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Translate a `PexFromTargetsRequest` into a concrete `PexRequest`."""
    transitive_targets = await Get[TransitiveTargets](Addresses, request.addresses)
    all_targets = transitive_targets.closure

    python_targets = []
    resource_targets = []
    python_requirement_fields = []
    for tgt in all_targets:
        if tgt.has_field(PythonSources):
            python_targets.append(tgt)
        if tgt.has_field(PythonRequirementsField):
            python_requirement_fields.append(tgt[PythonRequirementsField])
        # NB: PythonRequirementsFileSources is a subclass of FilesSources. We filter it out so that
        # requirements.txt is not included in the PEX and so that irrelevant changes to it (e.g.
        # whitespace changes) do not invalidate the PEX.
        if tgt.has_field(ResourcesSources) or (
            tgt.has_field(FilesSources) and not tgt.has_field(PythonRequirementsFileSources)
        ):
            resource_targets.append(tgt)

    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (tgt.get(PythonInterpreterCompatibility) for tgt in python_targets), python_setup
    )

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get[ImportablePythonSources](
            Targets(python_targets + resource_targets)
        )
        input_digests.append(prepared_sources.snapshot.directory_digest)
    merged_input_digest = await Get[Digest](
        DirectoriesToMerge(directories=tuple(input_digests))
    )

    requirements = PexRequirements.create_from_requirement_fields(
        python_requirement_fields, additional_requirements=request.additional_requirements
    )
    return PexRequest(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=request.description,
    )
async def setup(
    request: SetupRequest,
    docformatter: Docformatter,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    """Assemble the execute request that runs docformatter over the request's targets."""
    adaptors_with_origins = request.formatter.adaptors_with_origins
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="docformatter.pex",
            requirements=PexRequirements(docformatter.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                docformatter.default_interpreter_constraints
            ),
            entry_point=docformatter.get_entry_point(),
        )
    )
    # When an earlier formatter already ran, operate on its output so fixes compose.
    if request.formatter.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](
            LegacyAllSourceFilesRequest(
                adaptor_with_origin.adaptor for adaptor_with_origin in adaptors_with_origins
            )
        )
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.formatter.prior_formatter_result
    specified_source_files = await Get[SourceFiles](
        LegacySpecifiedSourceFilesRequest(adaptors_with_origins)
    )
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(
                all_source_files_snapshot.directory_digest,
                requirements_pex.directory_digest,
            )
        )
    )
    address_references = ", ".join(
        sorted(
            adaptor_with_origin.adaptor.address.reference()
            for adaptor_with_origin in adaptors_with_origins
        )
    )
    process_request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./docformatter.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            docformatter=docformatter,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=f"Run docformatter for {address_references}",
    )
    return Setup(process_request)
async def setup_setuptools(setuptools: Setuptools) -> SetuptoolsSetup:
    """Build the internal setuptools PEX.

    Note that this pex has no entrypoint. We use it to run our generated
    setup.py, which in turn imports from and invokes setuptools.
    """
    requirements_pex = await Get(
        Pex,
        PexRequest(
            output_filename="setuptools.pex",
            internal_only=True,
            requirements=PexRequirements(setuptools.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                setuptools.interpreter_constraints
            ),
        ),
    )
    return SetuptoolsSetup(requirements_pex=requirements_pex)
async def setup_docformatter(setup_request: SetupRequest, docformatter: Docformatter) -> Setup:
    """Prepare the Docformatter `Process` for the files in the request."""
    pex_get = Get(
        Pex,
        PexRequest(
            output_filename="docformatter.pex",
            internal_only=True,
            requirements=PexRequirements(docformatter.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                docformatter.interpreter_constraints
            ),
            entry_point=docformatter.entry_point,
        ),
    )
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in setup_request.request.field_sets),
    )
    source_files, requirements_pex = await MultiGet(sources_get, pex_get)
    # When an earlier formatter already ran, operate on its output so fixes compose.
    if setup_request.request.prior_formatter_result is None:
        source_files_snapshot = source_files.snapshot
    else:
        source_files_snapshot = setup_request.request.prior_formatter_result
    input_digest = await Get(
        Digest, MergeDigests((source_files_snapshot.digest, requirements_pex.digest))
    )
    process = await Get(
        Process,
        PexProcess(
            requirements_pex,
            argv=generate_args(
                source_files=source_files,
                docformatter=docformatter,
                check_only=setup_request.check_only,
            ),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=(
                f"Run Docformatter on {pluralize(len(setup_request.request.field_sets), 'file')}."
            ),
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
async def setup(
    request: SetupRequest,
    docformatter: Docformatter,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    """Assemble the execute request that runs Docformatter over the request's configs."""
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="docformatter.pex",
            requirements=PexRequirements(docformatter.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                docformatter.default_interpreter_constraints
            ),
            entry_point=docformatter.get_entry_point(),
        )
    )
    # When an earlier formatter already ran, operate on its output so fixes compose.
    if request.configs.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](
            AllSourceFilesRequest(config.sources for config in request.configs)
        )
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.configs.prior_formatter_result
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (config.sources, config.origin) for config in request.configs
        )
    )
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(
                all_source_files_snapshot.directory_digest,
                requirements_pex.directory_digest,
            )
        )
    )
    address_references = ", ".join(
        sorted(config.address.reference() for config in request.configs)
    )
    process = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./docformatter.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            docformatter=docformatter,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=(
            f"Run Docformatter on {pluralize(len(request.configs), 'target')}: "
            f"{address_references}."
        ),
    )
    return Setup(process)
def create_pex_and_get_all_data(
    self,
    *,
    requirements=PexRequirements(),
    entry_point=None,
    interpreter_constraints=PexInterpreterConstraints(),
    platforms=PexPlatforms(),
    sources: Optional[Digest] = None,
    additional_inputs: Optional[Digest] = None,
    additional_pants_args: Tuple[str, ...] = (),
    additional_pex_args: Tuple[str, ...] = (),
) -> Dict:
    """Build a test PEX, materialize it, and return its metadata.

    Returns a dict with the `Pex` product, the local path of the materialized
    zip, the parsed PEX-INFO, and the zip's file listing.
    """
    pex_request = PexRequest(
        output_filename="test.pex",
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=platforms,
        entry_point=entry_point,
        sources=sources,
        additional_inputs=additional_inputs,
        additional_args=additional_pex_args,
    )
    bootstrapper = create_options_bootstrapper(
        args=["--backend-packages2=pants.backend.python", *additional_pants_args]
    )
    pex = self.request_single_product(Pex, Params(pex_request, bootstrapper))
    # Write the digest out to disk so the zip can be inspected directly.
    self.scheduler.materialize_directory(DirectoryToMaterialize(pex.digest))
    pex_path = os.path.join(self.build_root, "test.pex")
    with zipfile.ZipFile(pex_path, "r") as zipfp:
        # PEX-INFO is a single line of JSON metadata, so one readline suffices.
        with zipfp.open("PEX-INFO", "r") as pex_info:
            pex_info_content = pex_info.readline().decode()
        pex_list = zipfp.namelist()
    return {
        "pex": pex,
        "local_path": pex_path,
        "info": json.loads(pex_info_content),
        "files": pex_list,
    }
async def legacy_pex_from_targets(
    request: LegacyPexFromTargetsRequest, python_setup: PythonSetup
) -> PexRequest:
    """Translate a `LegacyPexFromTargetsRequest` into a concrete `PexRequest`."""
    transitive_hydrated_targets = await Get[TransitiveHydratedTargets](
        Addresses, request.addresses
    )
    all_targets = transitive_hydrated_targets.closure
    python_targets = [t for t in all_targets if isinstance(t.adaptor, PythonTargetAdaptor)]
    resource_targets = [
        t for t in all_targets if isinstance(t.adaptor, (FilesAdaptor, ResourcesAdaptor))
    ]
    all_target_adaptors = [t.adaptor for t in all_targets]
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=all_target_adaptors, python_setup=python_setup
    )
    input_digests = []
    if request.additional_input_files:
        input_digests.append(request.additional_input_files)
    if request.include_source_files:
        prepared_sources = await Get[ImportablePythonSources](
            HydratedTargets(python_targets + resource_targets)
        )
        input_digests.append(prepared_sources.snapshot.directory_digest)
    merged_input_digest = await Get[Digest](
        DirectoriesToMerge(directories=tuple(input_digests))
    )
    requirements = PexRequirements.create_from_adaptors(
        adaptors=all_target_adaptors, additional_requirements=request.additional_requirements
    )
    return PexRequest(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        entry_point=request.entry_point,
        input_files_digest=merged_input_digest,
        additional_args=request.additional_args,
    )
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Translate a `PexFromTargetsRequest` into a concrete `PexRequest`."""
    transitive_targets = await Get[TransitiveTargets](Addresses, request.addresses)
    all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get[ImportablePythonSources](Targets(all_targets))
        input_digests.append(prepared_sources.snapshot.digest)
    merged_input_digest = await Get[Digest](MergeDigests(input_digests))

    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )
    requirements = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )
    return PexRequest(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=request.description,
    )
async def pylint_lint(
    configs: PylintConfigurations,
    pylint: Pylint,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Pylint over the configured targets and report the result.

    Builds a pylint.pex, stages the targets' sources (plus direct dependencies)
    and optional config file into a chroot, executes pylint, and converts the
    fallible process result into a LintResult.
    """
    if pylint.options.skip:
        return LintResult.noop()

    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    addresses = []
    for config in configs:
        addresses.append(config.address)
        addresses.extend(config.dependencies.value or ())
    targets = await Get[Targets](Addresses(addresses))
    chrooted_python_sources = await Get[ImportablePythonSources](Targets, targets)

    # NB: Pylint output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://pylint.pycqa.org/en/latest/faq.html#what-versions-of-python-is-pylint-supporting.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (config.compatibility for config in configs), python_setup
    )
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="pylint.pex",
            requirements=PexRequirements(pylint.get_requirement_specs()),
            interpreter_constraints=interpreter_constraints,
            entry_point=pylint.get_entry_point(),
        )
    )

    config_path: Optional[str] = pylint.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=tuple([config_path] if config_path else []),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        )
    )
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(
                requirements_pex.directory_digest,
                config_snapshot.directory_digest,
                chrooted_python_sources.snapshot.directory_digest,
            )
        ),
    )
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            ((config.sources, config.origin) for config in configs), strip_source_roots=True
        )
    )

    address_references = ", ".join(sorted(config.address.reference() for config in configs))
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        # Fix: plain string literal — the original used an `f` prefix with no placeholders (F541).
        pex_path="./pylint.pex",
        pex_args=generate_args(specified_source_files=specified_source_files, pylint=pylint),
        input_files=merged_input_files,
        description=f"Run Pylint on {pluralize(len(configs), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result)
async def setup(
    request: SetupRequest,
    black: Black,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    """Prepare a Process that runs Black over the requested field sets.

    Resolves the black.pex, optional config snapshot, and source files
    concurrently. When a prior formatter already produced a snapshot
    (formatter chaining), that snapshot is reused instead of re-fetching
    all sources.
    """
    requirements_pex_request = Get[Pex](
        PexRequest(
            output_filename="black.pex",
            requirements=PexRequirements(black.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                black.default_interpreter_constraints
            ),
            entry_point=black.get_entry_point(),
        )
    )
    config_path: Optional[str] = black.options.config
    config_snapshot_request = Get[Snapshot](
        PathGlobs(
            globs=tuple([config_path] if config_path else []),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--black-config`",
        )
    )
    all_source_files_request = Get[SourceFiles](
        AllSourceFilesRequest(field_set.sources for field_set in request.field_sets)
    )
    specified_source_files_request = Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (field_set.sources, field_set.origin) for field_set in request.field_sets
        )
    )

    requests: List[Get] = [
        requirements_pex_request,
        config_snapshot_request,
        specified_source_files_request,
    ]
    # Only fetch all sources when no prior formatter result exists to reuse.
    if request.field_sets.prior_formatter_result is None:
        requests.append(all_source_files_request)
    requirements_pex, config_snapshot, specified_source_files, *rest = cast(
        Union[Tuple[Pex, Snapshot, SourceFiles], Tuple[Pex, Snapshot, SourceFiles, SourceFiles]],
        await MultiGet(requests),
    )

    # Bug fix: test `is None`, mirroring the enqueue condition above. The original tested
    # truthiness, so a falsy-but-not-None prior result would fall through to `rest[0]`
    # even though no fourth request was made, raising IndexError.
    all_source_files_snapshot = (
        rest[0].snapshot
        if request.field_sets.prior_formatter_result is None
        else request.field_sets.prior_formatter_result
    )

    input_digest = await Get[Digest](
        MergeDigests(
            (all_source_files_snapshot.digest, requirements_pex.digest, config_snapshot.digest)
        )
    )

    address_references = ", ".join(
        sorted(field_set.address.reference() for field_set in request.field_sets)
    )
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./black.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            black=black,
            check_only=request.check_only,
        ),
        input_digest=input_digest,
        output_files=all_source_files_snapshot.files,
        description=(
            f"Run Black on {pluralize(len(request.field_sets), 'target')}: {address_references}."
        ),
    )
    return Setup(process, original_digest=all_source_files_snapshot.digest)
async def flake8_lint_partition(
    partition: Flake8Partition, flake8: Flake8, lint_subsystem: LintSubsystem
) -> LintResult:
    """Run Flake8 over one interpreter-constraint partition of field sets.

    Resolves the flake8.pex, optional config, and sources concurrently, runs
    Flake8, optionally captures a report file when `--lint-reports-dir` is set,
    and converts the fallible result into a LintResult.
    """
    pex_get = Get(
        Pex,
        PexRequest(
            output_filename="flake8.pex",
            internal_only=True,
            requirements=PexRequirements(flake8.all_requirements),
            interpreter_constraints=(
                partition.interpreter_constraints
                or PexInterpreterConstraints(flake8.interpreter_constraints)
            ),
            entry_point=flake8.entry_point,
        ),
    )
    config_get = Get(
        Digest,
        PathGlobs(
            globs=[flake8.config] if flake8.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--flake8-config`",
        ),
    )
    sources_get = Get(
        SourceFiles, SourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )
    flake8_pex, config_digest, source_files = await MultiGet(pex_get, config_get, sources_get)

    input_digest = await Get(
        Digest,
        MergeDigests((source_files.snapshot.digest, flake8_pex.digest, config_digest)),
    )

    report_file_name = "flake8_report.txt" if lint_subsystem.reports_dir else None
    result = await Get(
        FallibleProcessResult,
        PexProcess(
            flake8_pex,
            argv=generate_args(
                source_files=source_files, flake8=flake8, report_file_name=report_file_name
            ),
            input_digest=input_digest,
            output_files=(report_file_name,) if report_file_name else None,
            description=f"Run Flake8 on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )

    report = None
    if report_file_name:
        # Pull just the report file out of the process's output digest.
        report_digest = await Get(
            Digest,
            DigestSubset(
                result.output_digest,
                PathGlobs(
                    [report_file_name],
                    glob_match_error_behavior=GlobMatchErrorBehavior.warn,
                    description_of_origin="Flake8 report file",
                ),
            ),
        )
        report = LintReport(report_file_name, report_digest)

    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(sorted(partition.interpreter_constraints)),
        report=report,
    )
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Build a PexRequest from the transitive closure of the requested addresses.

    Gathers interpreter constraints and requirements from the closure and,
    when a constraints file is configured, validates that it covers every
    exact requirement; depending on `[python-setup].resolve_all_constraints`,
    the request may resolve the whole constraints file instead of the exact
    requirements.

    Raises:
        ValueError: if `resolve_all_constraints` was set explicitly (to a
            value other than NEVER) without `requirement_constraints`.
    """
    transitive_targets = await Get(TransitiveTargets, Addresses, request.addresses)
    all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    description = request.description

    if python_setup.requirement_constraints:
        # In requirement strings Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name) for req in exact_reqs
        }
        constraints_file_contents = await Get(
            DigestContents,
            PathGlobs(
                [python_setup.requirement_constraints],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin="the option `--python-setup-requirement-constraints`",
            ),
        )
        constraints_file_reqs = set(
            parse_requirements(next(iter(constraints_file_contents)).content.decode())
        )
        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            # Fix: sort for deterministic output — set iteration order is arbitrary, so the
            # original warning listed projects in a different order on each run.
            logger.warning(
                f"The constraints file {python_setup.requirement_constraints} does not contain "
                f"entries for the following requirements: {', '.join(sorted(unconstrained_projects))}"
            )

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
            python_setup.resolve_all_constraints == ResolveAllConstraintsOption.NONDEPLOYABLES
            and request.internal_only
        ):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring resolve_all_constraints setting in [python_setup] scope "
                    "because constraints file does not cover all requirements."
                )
            else:
                requirements = PexRequirements(str(req) for req in constraints_file_reqs)
                description = description or f"Resolving {python_setup.requirement_constraints}"
    elif (
        python_setup.resolve_all_constraints != ResolveAllConstraintsOption.NEVER
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        raise ValueError(
            f"[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            f"[python-setup].requirement_constraints must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=description,
    )
async def mypy_typecheck(request: MyPyRequest, mypy: MyPy) -> TypecheckResults:
    """Run MyPy over the transitive closure of the requested field sets.

    Builds a mypy.pex, writes the list of .py files to a file passed to MyPy,
    stages sources plus optional config into the chroot, and wraps the single
    fallible process result as a TypecheckResults.
    """
    if mypy.skip:
        return TypecheckResults([], typechecker_name="MyPy")

    transitive_targets = await Get(
        TransitiveTargets, Addresses(fs.address for fs in request.field_sets)
    )

    sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(transitive_targets.closure))
    mypy_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            requirements=PexRequirements(mypy.all_requirements),
            # NB: This only determines which interpreter MyPy itself runs with. Users declare the
            # version of the code being checked via `--python-version`; see
            # https://mypy.readthedocs.io/en/stable/config_file.html#platform-configuration.
            # We don't auto-configure that, to keep things simple and avoid Pants magically
            # setting values for users.
            interpreter_constraints=PexInterpreterConstraints(mypy.interpreter_constraints),
            entry_point=mypy.entry_point,
        ),
    )
    config_get = Get(
        Digest,
        PathGlobs(
            globs=[mypy.config] if mypy.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--mypy-config`",
        ),
    )
    prepared_sources, mypy_pex, config_digest = await MultiGet(
        sources_get, mypy_pex_get, config_get
    )

    srcs_snapshot = prepared_sources.source_files.snapshot
    file_list_path = "__files.txt"
    python_files = "\n".join(f for f in srcs_snapshot.files if f.endswith(".py"))
    file_list_digest = await Get(
        Digest, CreateDigest([FileContent(file_list_path, python_files.encode())])
    )

    merged_input = await Get(
        Digest,
        MergeDigests([file_list_digest, srcs_snapshot.digest, mypy_pex.digest, config_digest]),
    )

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            mypy_pex,
            argv=generate_args(mypy, file_list_path=file_list_path),
            input_digest=merged_input,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots)},
            description=f"Run MyPy on {pluralize(len(srcs_snapshot.files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResults(
        [TypecheckResult.from_fallible_process_result(result)], typechecker_name="MyPy"
    )
async def lint(
    linter: PylintLinter,
    pylint: Pylint,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Pylint over the linter's target adaptors (legacy Target API).

    Hydrates each target's direct dependencies into the chroot, builds a
    pylint.pex, stages sources and optional config, executes pylint, and
    converts the fallible result into a LintResult.
    """
    if pylint.options.skip:
        return LintResult.noop()

    adaptors_with_origins = linter.adaptors_with_origins

    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    hydrated_targets = [
        HydratedTarget(adaptor_with_origin.adaptor)
        for adaptor_with_origin in adaptors_with_origins
    ]
    dependencies = await MultiGet(
        Get[HydratedTarget](Address, dependency)
        for dependency in itertools.chain.from_iterable(
            ht.adaptor.dependencies for ht in hydrated_targets
        )
    )
    chrooted_python_sources = await Get[ImportablePythonSources](
        HydratedTargets([*hydrated_targets, *dependencies])
    )

    # NB: Pylint output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://pylint.pycqa.org/en/latest/faq.html#what-versions-of-python-is-pylint-supporting.
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        (adaptor_with_origin.adaptor for adaptor_with_origin in adaptors_with_origins),
        python_setup=python_setup,
    )
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="pylint.pex",
            requirements=PexRequirements(pylint.get_requirement_specs()),
            interpreter_constraints=interpreter_constraints,
            entry_point=pylint.get_entry_point(),
        )
    )

    config_path: Optional[str] = pylint.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=tuple([config_path] if config_path else []),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        )
    )
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(
                requirements_pex.directory_digest,
                config_snapshot.directory_digest,
                chrooted_python_sources.snapshot.directory_digest,
            )
        ),
    )
    specified_source_files = await Get[SourceFiles](
        LegacySpecifiedSourceFilesRequest(adaptors_with_origins, strip_source_roots=True)
    )

    address_references = ", ".join(
        sorted(
            adaptor_with_origin.adaptor.address.reference()
            for adaptor_with_origin in adaptors_with_origins
        )
    )
    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        # Fix: plain string literal — the original used an `f` prefix with no placeholders (F541).
        pex_path="./pylint.pex",
        pex_args=generate_args(specified_source_files=specified_source_files, pylint=pylint),
        input_files=merged_input_files,
        description=f"Run Pylint for {address_references}",
    )
    result = await Get[FallibleExecuteProcessResult](ExecuteProcessRequest, request)
    return LintResult.from_fallible_execute_process_result(result)