async def docformatter_lint(configs: DocformatterConfigurations, docformatter: Docformatter) -> LintResult:
    """Run Docformatter in check-only mode and report the outcome as a LintResult."""
    if docformatter.options.skip:
        return LintResult.noop()
    lint_setup = await Get[Setup](SetupRequest(configs, check_only=True))
    execution = await Get[FallibleProcessResult](Process, lint_setup.process)
    return LintResult.from_fallible_process_result(execution)
async def isort_lint(field_sets: IsortFieldSets, isort: Isort) -> LintResult:
    """Check import ordering with isort without rewriting any files."""
    if isort.options.skip:
        return LintResult.noop()
    lint_setup = await Get[Setup](SetupRequest(field_sets, check_only=True))
    execution = await Get[FallibleProcessResult](Process, lint_setup.process)
    return LintResult.from_fallible_process_result(
        execution, linter_name="isort", strip_chroot_path=True
    )
async def docformatter_lint(field_sets: DocformatterFieldSets, docformatter: Docformatter) -> LintResult:
    """Check docstring formatting with Docformatter without modifying sources."""
    if docformatter.options.skip:
        return LintResult.noop()
    lint_setup = await Get[Setup](SetupRequest(field_sets, check_only=True))
    execution = await Get[FallibleProcessResult](Process, lint_setup.process)
    return LintResult.from_fallible_process_result(execution, linter_name="Docformatter")
async def black_lint(field_sets: BlackFieldSets, black: Black) -> LintResult:
    """Run Black in check-only mode and report whether files would be reformatted."""
    if black.options.skip:
        return LintResult.noop()
    lint_setup = await Get[Setup](SetupRequest(field_sets, check_only=True))
    execution = await Get[FallibleProcessResult](Process, lint_setup.process)
    return LintResult.from_fallible_process_result(
        execution, linter_name="Black", strip_chroot_path=True
    )
async def bandit_lint(
    configs: BanditConfigurations,
    bandit: Bandit,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Bandit over the specified Python sources and report security findings.

    Builds a Bandit PEX constrained to the targets' interpreter compatibility,
    merges in any user-provided config file, and runs Bandit on only the
    originally-specified files while keeping all sources available in the chroot.
    """
    if bandit.options.skip:
        return LintResult.noop()
    # NB: Bandit output depends upon which Python interpreter version it's run with. See
    # https://github.com/PyCQA/bandit#under-which-version-of-python-should-i-install-bandit.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (config.compatibility for config in configs), python_setup=python_setup
    )
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="bandit.pex",
            requirements=PexRequirements(bandit.get_requirement_specs()),
            interpreter_constraints=interpreter_constraints,
            entry_point=bandit.get_entry_point(),
        )
    )
    # An empty glob tuple is a no-op; errors surface with the option name as origin.
    config_path: Optional[str] = bandit.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=tuple([config_path] if config_path else []),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--bandit-config`",
        )
    )
    all_source_files = await Get[SourceFiles](
        AllSourceFilesRequest(config.sources for config in configs)
    )
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest((config.sources, config.origin) for config in configs)
    )
    merged_input_files = await Get[Digest](
        MergeDigests(
            (all_source_files.snapshot.digest, requirements_pex.digest, config_snapshot.digest)
        ),
    )
    address_references = ", ".join(sorted(config.address.reference() for config in configs))
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        # Fix: this was an f-string with no placeholders (ruff F541).
        pex_path="./bandit.pex",
        pex_args=generate_args(specified_source_files=specified_source_files, bandit=bandit),
        input_files=merged_input_files,
        description=f"Run Bandit on {pluralize(len(configs), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result)
async def flake8_lint(
    configs: Flake8Configurations,
    flake8: Flake8,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Flake8 over the specified Python sources.

    Builds a Flake8 PEX constrained to the targets' interpreter compatibility,
    merges in any user-provided config file, and lints only the
    originally-specified files while keeping all sources available in the chroot.
    """
    if flake8.options.skip:
        return LintResult.noop()
    # NB: Flake8 output depends upon which Python interpreter version it's run with. We ensure
    # that each target runs with its own interpreter constraints. See
    # http://flake8.pycqa.org/en/latest/user/invocation.html.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (config.compatibility for config in configs), python_setup
    )
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="flake8.pex",
            requirements=PexRequirements(flake8.get_requirement_specs()),
            interpreter_constraints=interpreter_constraints,
            entry_point=flake8.get_entry_point(),
        )
    )
    # An empty glob tuple is a no-op; errors surface with the option name as origin.
    config_path: Optional[str] = flake8.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=tuple([config_path] if config_path else []),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--flake8-config`",
        )
    )
    all_source_files = await Get[SourceFiles](
        AllSourceFilesRequest(config.sources for config in configs)
    )
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest((config.sources, config.origin) for config in configs)
    )
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(
                all_source_files.snapshot.directory_digest,
                requirements_pex.directory_digest,
                config_snapshot.directory_digest,
            )
        ),
    )
    address_references = ", ".join(sorted(config.address.reference() for config in configs))
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        # Fix: this was an f-string with no placeholders (ruff F541).
        pex_path="./flake8.pex",
        pex_args=generate_args(specified_source_files=specified_source_files, flake8=flake8),
        input_files=merged_input_files,
        description=f"Run Flake8 on {pluralize(len(configs), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result)
async def run_buf(request: BufRequest, buf: BufSubsystem) -> LintResults:
    """Run `buf lint` against the Protobuf sources in `request`.

    Pulls in the transitive closure of Protobuf sources so imports can resolve,
    but restricts linting (via `--path`) to the directly-requested files.
    """
    if buf.skip:
        return LintResults([], linter_name=request.name)
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest((field_set.address for field_set in request.field_sets)),
    )
    # All transitive Protobuf sources, stripped of source roots, so imports resolve.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            tgt[ProtobufSourceField]
            for tgt in transitive_targets.closure
            if tgt.has_field(ProtobufSourceField)
        ),
    )
    # Only the files the caller actually asked to lint.
    target_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (field_set.sources for field_set in request.field_sets),
            for_sources_types=(ProtobufSourceField,),
            enable_codegen=True,
        ),
    )
    download_buf_get = Get(
        DownloadedExternalTool, ExternalToolRequest, buf.get_request(Platform.current)
    )
    # Resolve both source sets and the tool download concurrently.
    target_sources_stripped, all_sources_stripped, downloaded_buf = await MultiGet(
        target_stripped_sources_request, all_stripped_sources_request, download_buf_get
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                target_sources_stripped.snapshot.digest,
                all_sources_stripped.snapshot.digest,
                downloaded_buf.digest,
            )
        ),
    )
    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                downloaded_buf.exe,
                "lint",
                *buf.args,
                # Limit linting to the requested files only.
                "--path",
                ",".join(target_sources_stripped.snapshot.files),
            ],
            input_digest=input_digest,
            description=f"Run Buf on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    result = LintResult.from_fallible_process_result(process_result)
    return LintResults([result], linter_name=request.name)
async def flake8_lint_partition(
    partition: Flake8Partition,
    flake8: Flake8,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Flake8 over one interpreter-constraint partition of field sets."""
    requirements_pex_request = Get[Pex](
        PexRequest(
            output_filename="flake8.pex",
            requirements=PexRequirements(flake8.get_requirement_specs()),
            # Fall back to the tool's default constraints when the partition has none.
            interpreter_constraints=(
                partition.interpreter_constraints
                or PexInterpreterConstraints(flake8.default_interpreter_constraints)
            ),
            entry_point=flake8.get_entry_point(),
        )
    )
    # An empty glob list is a no-op; errors surface with the option name as origin.
    config_path: Optional[str] = flake8.options.config
    config_snapshot_request = Get[Snapshot](
        PathGlobs(
            globs=[config_path] if config_path else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--flake8-config`",
        )
    )
    all_source_files_request = Get[SourceFiles](
        AllSourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )
    specified_source_files_request = Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (field_set.sources, field_set.origin) for field_set in partition.field_sets
        )
    )
    # Resolve everything concurrently; the `cast` recovers the per-element types
    # that a heterogeneous MultiGet cannot express.
    requirements_pex, config_snapshot, all_source_files, specified_source_files = cast(
        Tuple[Pex, Snapshot, SourceFiles, SourceFiles],
        await MultiGet(
            [
                requirements_pex_request,
                config_snapshot_request,
                all_source_files_request,
                specified_source_files_request,
            ]
        ),
    )
    input_digest = await Get[Digest](
        MergeDigests(
            (all_source_files.snapshot.digest, requirements_pex.digest, config_snapshot.digest)
        )
    )
    address_references = ", ".join(
        sorted(field_set.address.reference() for field_set in partition.field_sets)
    )
    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./flake8.pex",
        pex_args=generate_args(specified_source_files=specified_source_files, flake8=flake8),
        input_digest=input_digest,
        description=(
            f"Run Flake8 on {pluralize(len(partition.field_sets), 'target')}: {address_references}."
        ),
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result, linter_name="Flake8")
async def shfmt_lint(request: ShfmtRequest, shfmt: Shfmt) -> LintResults:
    """Run shfmt in check-only mode against the requested shell sources."""
    if shfmt.skip:
        return LintResults([], linter_name="shfmt")
    lint_setup = await Get(Setup, SetupRequest(request, check_only=True))
    execution = await Get(FallibleProcessResult, Process, lint_setup.process)
    single = LintResult.from_fallible_process_result(execution)
    return LintResults([single], linter_name="shfmt")
async def run_helm_lint(request: HelmLintRequest, helm_subsystem: HelmSubsystem) -> LintResults:
    """Run `helm lint` on every requested chart that has not opted out.

    Charts whose target sets `skip_lint` are excluded; each remaining chart is
    linted by its own Helm process and reported as its own partition.
    """
    # Fix: materialize the filtered field sets once. `charts` was previously built
    # only from non-skipped field sets but then zipped against the *unfiltered*
    # `request.field_sets`, mis-pairing charts with field sets whenever any
    # target used `skip_lint` (zip truncates/shifts silently).
    field_sets = [fs for fs in request.field_sets if not fs.skip_lint.value]
    charts = await MultiGet(Get(HelmChart, HelmChartRequest(fs)) for fs in field_sets)
    logger.debug(f"Linting {pluralize(len(charts), 'chart')}...")

    def create_process(chart: HelmChart, field_set: HelmLintFieldSet) -> HelmProcess:
        """Build the `helm lint` invocation for a single chart."""
        argv = ["lint", chart.path]
        # Strict mode applies when either the target or the subsystem requests it.
        strict: bool = field_set.lint_strict.value or helm_subsystem.lint_strict
        if strict:
            argv.append("--strict")
        return HelmProcess(
            argv,
            input_digest=chart.snapshot.digest,
            description=f"Linting chart: {chart.metadata.name}",
        )

    process_results = await MultiGet(
        Get(
            FallibleProcessResult,
            HelmProcess,
            create_process(chart, field_set),
        )
        for chart, field_set in zip(charts, field_sets)
    )
    results = [
        LintResult.from_fallible_process_result(
            process_result, partition_description=chart.metadata.name
        )
        for chart, process_result in zip(charts, process_results)
    ]
    return LintResults(results, linter_name=request.name)
async def flake8_lint_partition(
    partition: Flake8Partition, flake8: Flake8, lint_subsystem: LintSubsystem
) -> LintResult:
    """Run Flake8 over one interpreter-constraint partition, optionally writing a report file."""
    flake8_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="flake8.pex",
            internal_only=True,
            requirements=PexRequirements(flake8.all_requirements),
            interpreter_constraints=partition.interpreter_constraints,
            main=flake8.main,
        ),
    )
    config_files_get = Get(ConfigFiles, ConfigFilesRequest, flake8.config_request)
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in partition.field_sets),
    )
    # Resolve the PEX, config files, and sources concurrently.
    flake8_pex, config_files, source_files = await MultiGet(
        flake8_pex_get, config_files_get, source_files_get
    )
    input_digest = await Get(
        Digest, MergeDigests((source_files.snapshot.digest, config_files.snapshot.digest))
    )
    # Only capture a report file when the user configured a reports directory.
    report_file_name = "flake8_report.txt" if lint_subsystem.reports_dir else None
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            flake8_pex,
            argv=generate_argv(source_files, flake8, report_file_name=report_file_name),
            input_digest=input_digest,
            output_files=(report_file_name,) if report_file_name else None,
            description=f"Run Flake8 on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = None
    if report_file_name:
        # Warn (rather than error) if the report is missing, e.g. when Flake8 crashed
        # before writing it.
        report_digest = await Get(
            Digest,
            DigestSubset(
                result.output_digest,
                PathGlobs(
                    [report_file_name],
                    glob_match_error_behavior=GlobMatchErrorBehavior.warn,
                    description_of_origin="Flake8 report file",
                ),
            ),
        )
        report = LintReport(report_file_name, report_digest)
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
async def pyupgrade_lint(result: PyUpgradeResult, pyupgrade: PyUpgrade) -> LintResults:
    """Convert an already-computed pyupgrade run into lint results."""
    if pyupgrade.skip:
        return LintResults([], linter_name="pyupgrade")
    single = LintResult.from_fallible_process_result(result.process_result)
    return LintResults([single], linter_name="pyupgrade")
def test_precise_file_args(self) -> None:
    """A file-literal origin narrows the run to that one file."""
    target = self.make_target_with_origin(
        [self.good_source, self.bad_source],
        origin=FilesystemLiteralSpec(self.good_source.path),
    )
    lint_result, fmt_result = self.run_docformatter([target])
    # Only the file named in the origin is checked, so the bad file never trips lint.
    assert lint_result == LintResult.noop()
    expected_digest = self.get_digest([self.good_source, self.bad_source])
    assert fmt_result.digest == expected_digest
async def docformatter_lint(request: DocformatterRequest, docformatter: Docformatter) -> LintResults:
    """Check docstring formatting with Docformatter without modifying sources."""
    if docformatter.skip:
        return LintResults([], linter_name="Docformatter")
    lint_setup = await Get(Setup, SetupRequest(request, check_only=True))
    execution = await Get(FallibleProcessResult, Process, lint_setup.process)
    single = LintResult.from_fallible_process_result(execution)
    return LintResults([single], linter_name="Docformatter")
async def isort_lint(request: IsortRequest, isort: Isort) -> LintResults:
    """Check import ordering with isort without rewriting any files."""
    if isort.skip:
        return LintResults([], linter_name="isort")
    lint_setup = await Get(Setup, SetupRequest(request, check_only=True))
    execution = await Get(FallibleProcessResult, Process, lint_setup.process)
    single = LintResult.from_fallible_process_result(execution, strip_chroot_path=True)
    return LintResults([single], linter_name="isort")
def test_streaming_output_partitions() -> None:
    """Partitioned results render one labelled section per partition."""
    failing = LintResult(21, "", "", partition_description="ghc8.1")
    passing = LintResult(0, "stdout", "stderr", partition_description="ghc9.2")
    results = LintResults([failing, passing], linter_name="linter")
    assert results.level() == LogLevel.ERROR
    # Exit code of the first failing partition drives the headline.
    expected = dedent("""\
        linter failed (exit code 21).
        Partition #1 - ghc8.1:

        Partition #2 - ghc9.2:
        stdout
        stderr

        """)
    assert results.message() == expected
async def shfmt_lint(request: ShfmtRequest, shfmt: Shfmt) -> LintResults:
    """Run shfmt in check-only mode against the requested shell sources."""
    if shfmt.options.skip:
        return LintResults([], linter_name="shfmt")
    lint_setup = await Get(Setup, SetupRequest(request, check_only=True))
    # `FallibleProcessResult` (not `ProcessResult`) because a failing process is an
    # expected lint outcome, not an engine error.
    execution = await Get(FallibleProcessResult, Process, lint_setup.process)
    single = LintResult.from_fallible_process_result(execution)
    return LintResults([single], linter_name="shfmt")
async def yapf_lint(request: YapfRequest, yapf: Yapf) -> LintResults:
    """Run yapf in check-only mode against the requested Python sources."""
    if yapf.skip:
        return LintResults([], linter_name="yapf")
    lint_setup = await Get(Setup, SetupRequest(request, check_only=True))
    execution = await Get(FallibleProcessResult, Process, lint_setup.process)
    single = LintResult.from_fallible_process_result(execution)
    return LintResults([single], linter_name="yapf")
async def black_lint(field_sets: BlackRequest, black: Black) -> LintResults:
    """Run Black in check-only mode and report whether files would be reformatted."""
    # NOTE(review): the parameter is named `field_sets` although its type is a
    # request object; sibling rules name this `request` — consider renaming later.
    if black.skip:
        return LintResults([], linter_name="Black")
    lint_setup = await Get(Setup, SetupRequest(field_sets, check_only=True))
    execution = await Get(FallibleProcessResult, Process, lint_setup.process)
    single = LintResult.from_fallible_process_result(execution, strip_chroot_path=True)
    return LintResults([single], linter_name="Black")
async def run_terraform_validate(
    request: ValidateRequest, subsystem: TerraformValidateSubsystem
) -> LintResults:
    """Run `terraform validate` once per directory containing requested sources."""
    if subsystem.options.skip:
        return LintResults([], linter_name="terraform validate")
    sources_files = await Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in request.field_sets),
    )
    # `terraform validate` operates on a directory-by-directory basis. First determine the
    # directories in the snapshot. This does not use `source_files_snapshot.dirs` because
    # that will be empty if the files are in a single directory.
    directories = defaultdict(list)
    for file in sources_files.snapshot.files:
        directory = os.path.dirname(file)
        if directory == "":
            directory = "."
        directories[directory].append(file)
    # Then create a process for each directory.
    directory_to_process = {}
    for directory, files_in_directory in directories.items():
        args = [
            "validate",
            directory,
        ]
        # Defensive filter of empty args; both entries are non-empty here since
        # an empty dirname was replaced with "." above.
        args = [arg for arg in args if arg]
        process = TerraformProcess(
            args=tuple(args),
            input_digest=sources_files.snapshot.digest,
            output_files=tuple(files_in_directory),
            description=f"Run `terraform validate` on {pluralize(len(files_in_directory), 'file')}.",
        )
        directory_to_process[directory] = process
    # Run all directories concurrently; dict iteration order keeps keys() and
    # values() aligned for the zip below.
    results = await MultiGet(
        Get(FallibleProcessResult, TerraformProcess, process)
        for process in directory_to_process.values()
    )
    lint_results = []
    for directory, result in zip(directory_to_process.keys(), results):
        lint_result = LintResult(
            exit_code=result.exit_code,
            stdout=result.stdout.decode(),
            stderr=result.stderr.decode(),
            partition_description=f"`terraform validate` on `{directory}`",
        )
        lint_results.append(lint_result)
    return LintResults(lint_results, linter_name="terraform validate")
async def tffmt_lint(request: TffmtRequest, tffmt: TfFmtSubsystem) -> LintResults:
    """Run the tffmt setup's per-directory processes in check-only mode."""
    if tffmt.options.skip:
        return LintResults([], linter_name="tffmt")
    lint_setup = await Get(Setup, SetupRequest(request, check_only=True))
    per_directory_results = await MultiGet(
        Get(FallibleProcessResult, TerraformProcess, process)
        for directory, process in lint_setup.directory_to_process.items()
    )
    lint_results = []
    for proc_result in per_directory_results:
        lint_results.append(LintResult.from_fallible_process_result(proc_result))
    return LintResults(lint_results, linter_name="tffmt")
def test_streaming_output_failure() -> None:
    """A non-zero exit code logs at ERROR and reports the failure headline."""
    single = LintResult(18, "stdout", "stderr")
    results = LintResults([single], linter_name="linter")
    assert results.level() == LogLevel.ERROR
    expected = dedent("""\
        linter failed (exit code 18).
        stdout
        stderr

        """)
    assert results.message() == expected
def test_streaming_output_success() -> None:
    """A zero exit code logs at INFO and reports the success headline."""
    single = LintResult(0, "stdout", "stderr")
    results = LintResults([single], linter_name="linter")
    assert results.level() == LogLevel.INFO
    expected = dedent("""\
        linter succeeded.
        stdout
        stderr

        """)
    assert results.message() == expected
async def flake8_lint_partition(
    partition: Flake8Partition, flake8: Flake8, first_party_plugins: Flake8FirstPartyPlugins
) -> LintResult:
    """Run Flake8 (with any first-party plugins) over one interpreter-constraint partition."""
    flake8_pex_get = Get(
        VenvPex,
        PexRequest,
        flake8.to_pex_request(
            interpreter_constraints=partition.interpreter_constraints,
            extra_requirements=first_party_plugins.requirement_strings,
        ),
    )
    config_files_get = Get(ConfigFiles, ConfigFilesRequest, flake8.config_request)
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source for field_set in partition.field_sets),
    )
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest, CreateDigest([Directory(REPORT_DIR)]))
    # Resolve the PEX, config, report dir, and sources concurrently.
    flake8_pex, config_files, report_directory, source_files = await MultiGet(
        flake8_pex_get, config_files_get, report_directory_digest_get, source_files_get
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                source_files.snapshot.digest,
                first_party_plugins.sources_digest,
                config_files.snapshot.digest,
                report_directory,
            )
        ),
    )
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            flake8_pex,
            argv=generate_argv(source_files, flake8),
            input_digest=input_digest,
            output_directories=(REPORT_DIR,),
            # Expose first-party plugin sources on the PEX's extra sys.path.
            extra_env={"PEX_EXTRA_SYS_PATH": first_party_plugins.PREFIX},
            concurrency_available=len(partition.field_sets),
            description=f"Run Flake8 on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    # Strip the report-dir prefix so the captured report sits at the digest root.
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
def test_respects_passthrough_args(self) -> None:
    """With `--make-summary-multi-line`, this docstring needs no change, so lint is a no-op."""
    needs_config = FileContent(
        path="needs_config.py",
        content=b'"""\nOne line docstring acting like it\'s multiline.\n"""\n',
    )
    target = self.make_target_with_origin([needs_config])
    lint_result, fmt_result = self.run_docformatter(
        [target], passthrough_args="--make-summary-multi-line"
    )
    assert lint_result == LintResult.noop()
    assert fmt_result.digest == self.get_digest([needs_config])
def lint_result(self) -> LintResults:
    """Build the LintResults this mock linter reports for its field sets."""
    addresses = [fs.address for fs in self.field_sets]
    single = LintResult(
        self.exit_code(addresses),
        self.stdout(addresses),
        "",
        linter_name=self.linter_name,
    )
    return LintResults([single])
async def bandit_lint_partition(partition: BanditPartition, bandit: Bandit) -> LintResult:
    """Run Bandit over one interpreter-constraint partition of field sets."""
    bandit_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="bandit.pex",
            internal_only=True,
            requirements=bandit.pex_requirements(),
            interpreter_constraints=partition.interpreter_constraints,
            main=bandit.main,
        ),
    )
    config_files_get = Get(ConfigFiles, ConfigFilesRequest, bandit.config_request)
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source for field_set in partition.field_sets),
    )
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest, CreateDigest([Directory(REPORT_DIR)]))
    # Resolve the PEX, config, report dir, and sources concurrently.
    bandit_pex, config_files, report_directory, source_files = await MultiGet(
        bandit_pex_get, config_files_get, report_directory_digest_get, source_files_get
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            (source_files.snapshot.digest, config_files.snapshot.digest, report_directory)
        ),
    )
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            bandit_pex,
            argv=generate_argv(source_files, bandit),
            input_digest=input_digest,
            description=f"Run Bandit on {pluralize(len(partition.field_sets), 'file')}.",
            output_directories=(REPORT_DIR,),
            level=LogLevel.DEBUG,
        ),
    )
    # Strip the report-dir prefix so the captured report sits at the digest root.
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
async def scalafmt_lint(request: ScalafmtRequest, tool: ScalafmtSubsystem) -> LintResults:
    """Run scalafmt in check-only mode, one JVM process per partition."""
    if tool.skip:
        return LintResults([], linter_name=request.name)
    lint_setup = await Get(Setup, SetupRequest(request, check_only=True))
    process_results = await MultiGet(
        Get(FallibleProcessResult, JvmProcess, partition.process)
        for partition in lint_setup.partitions
    )
    lint_results = []
    for proc_result, partition in zip(process_results, lint_setup.partitions):
        lint_results.append(
            LintResult.from_fallible_process_result(
                proc_result, partition_description=partition.description
            )
        )
    return LintResults(lint_results, linter_name=request.name)
async def run_hadolint(request: HadolintRequest, hadolint: Hadolint) -> LintResults:
    """Run hadolint over the Dockerfiles referenced by the request's field sets."""
    if hadolint.skip:
        return LintResults([], linter_name=request.name)
    # Download the tool and resolve config files concurrently.
    downloaded_hadolint, config_files = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest, hadolint.get_request(Platform.current)),
        Get(ConfigFiles, ConfigFilesRequest, hadolint.config_request()),
    )
    dockerfile_infos = await MultiGet(
        Get(DockerfileInfo, DockerfileInfoRequest(field_set.address))
        for field_set in request.field_sets
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                downloaded_hadolint.digest,
                config_files.snapshot.digest,
                *(info.digest for info in dockerfile_infos),
            )
        ),
    )
    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[downloaded_hadolint.exe, *generate_argv(dockerfile_infos, hadolint)],
            # Hadolint tries to read a configuration file from a few locations on the system:
            # https://github.com/hadolint/hadolint/blob/43d2bfe9f71dea9ddd203d5bdbd2cc1fb512e4dd/src/Hadolint/Config/Configfile.hs#L75-L101
            #
            # We don't want it to do this in order to have reproducible results machine to
            # machine and there is also the problem that on some machines, an unset (as opposed
            # to empty) HOME env var crashes hadolint with SIGSEGV.
            # See: https://github.com/hadolint/hadolint/issues/741
            #
            # As such, we set HOME to blank so no system configuration is found and, as a side
            # benefit, we don't crash.
            #
            # See https://github.com/pantsbuild/pants/issues/13735 for more details.
            env={"HOME": ""},
            input_digest=input_digest,
            description=f"Run `hadolint` on {pluralize(len(dockerfile_infos), 'Dockerfile')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResults(
        [LintResult.from_fallible_process_result(process_result)], linter_name=request.name
    )
async def gofmt_lint(request: GofmtRequest, gofmt: GofmtSubsystem) -> LintResults:
    """Run gofmt in check-only mode, failing when any file would be reformatted."""
    if gofmt.options.skip:
        return LintResults([], linter_name="gofmt")
    lint_setup = await Get(Setup, SetupRequest(request, check_only=True))
    execution = await Get(FallibleProcessResult, Process, lint_setup.process)
    lint_result = LintResult.from_fallible_process_result(execution)
    needs_formatting = lint_result.exit_code == 0 and lint_result.stdout.strip() != ""
    if needs_formatting:
        # gofmt exits 0 even when files need reformatting; surface that as a failure.
        lint_result = dataclasses.replace(
            lint_result,
            exit_code=1,
            stdout=f"The following Go files require formatting:\n{lint_result.stdout}\n",
        )
    return LintResults([lint_result], linter_name="gofmt")