async def run_buf(request: BufRequest, buf: BufSubsystem) -> LintResults:
    """Run `buf lint` over the Protobuf sources in ``request.field_sets``.

    Returns a single ``LintResult`` wrapping one ``buf lint`` invocation over
    all requested files.
    """
    if buf.skip:
        return LintResults([], linter_name=request.name)
    # The transitive closure must be present in the sandbox so imports between
    # .proto files resolve, even though only the requested files are linted.
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest(
            (field_set.address for field_set in request.field_sets)),
    )
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(tgt[ProtobufSourceField]
                           for tgt in transitive_targets.closure
                           if tgt.has_field(ProtobufSourceField)),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (field_set.sources for field_set in request.field_sets),
            for_sources_types=(ProtobufSourceField, ),
            enable_codegen=True,
        ),
    )
    download_buf_get = Get(DownloadedExternalTool, ExternalToolRequest,
                           buf.get_request(Platform.current))
    # Resolve the three independent requests concurrently.
    target_sources_stripped, all_sources_stripped, downloaded_buf = await MultiGet(
        target_stripped_sources_request, all_stripped_sources_request,
        download_buf_get)
    input_digest = await Get(
        Digest,
        MergeDigests((
            target_sources_stripped.snapshot.digest,
            all_sources_stripped.snapshot.digest,
            downloaded_buf.digest,
        )),
    )
    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                downloaded_buf.exe,
                "lint",
                *buf.args,
                # Restrict linting to the directly-requested files; the rest of
                # the closure is present only to satisfy imports.
                "--path",
                ",".join(target_sources_stripped.snapshot.files),
            ],
            input_digest=input_digest,
            description=
            f"Run Buf on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    result = LintResult.from_fallible_process_result(process_result)
    return LintResults([result], linter_name=request.name)
async def shfmt_lint(request: ShfmtRequest, shfmt: Shfmt) -> LintResults:
    """Check shell sources with shfmt without modifying them."""
    # Nothing to do when the user disabled the linter.
    if shfmt.skip:
        return LintResults([], linter_name="shfmt")
    lint_setup = await Get(Setup, SetupRequest(request, check_only=True))
    process_result = await Get(FallibleProcessResult, Process,
                               lint_setup.process)
    lint_result = LintResult.from_fallible_process_result(process_result)
    return LintResults([lint_result], linter_name="shfmt")
async def flake8_lint(
    request: Flake8Request,
    flake8: Flake8,
    python_setup: PythonSetup,
    first_party_plugins: Flake8FirstPartyPlugins,
) -> LintResults:
    """Partition field sets by interpreter constraints and lint each partition.

    NB: Flake8 output depends on the Python interpreter version it runs under
    (http://flake8.pycqa.org/en/latest/user/invocation.html), so all Python 2
    targets run together and all Python 3 targets run together.
    """
    if flake8.skip:
        return LintResults([], linter_name=request.name)
    constraints_to_field_sets = defaultdict(set)
    for field_set in request.field_sets:
        ics = InterpreterConstraints.create_from_compatibility_fields(
            [
                field_set.interpreter_constraints,
                *first_party_plugins.interpreter_constraints_fields
            ],
            python_setup,
        )
        constraints_to_field_sets[ics].add(field_set)
    # Sort for determinism, both across partitions and within each partition.
    partitioned_results = await MultiGet(
        Get(
            LintResult,
            Flake8Partition(tuple(sorted(fss, key=lambda fs: fs.address)),
                            ics),
        ) for ics, fss in sorted(constraints_to_field_sets.items()))
    return LintResults(partitioned_results, linter_name=request.name)
async def pylint_lint(request: PylintRequest, pylint: Pylint,
                      python_setup: PythonSetup) -> LintResults:
    """Run Pylint, partitioning targets by their interpreter constraints."""
    if pylint.skip:
        return LintResults([], linter_name="Pylint")
    # Resolve any user-configured Pylint source plugins to concrete addresses.
    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        pylint.source_plugins)
    plugin_targets_request = Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))
    linted_targets_request = Get(
        Targets,
        Addresses(field_set.address for field_set in request.field_sets))
    plugin_targets, linted_targets = await MultiGet(plugin_targets_request,
                                                    linted_targets_request)
    plugin_targets_compatibility_fields = tuple(
        plugin_tgt[InterpreterConstraintsField]
        for plugin_tgt in plugin_targets.closure
        if plugin_tgt.has_field(InterpreterConstraintsField))
    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    per_target_dependencies = await MultiGet(
        Get(Targets, DependenciesRequest(field_set.dependencies))
        for field_set in request.field_sets)
    # We batch targets by their interpreter constraints to ensure, for example, that all Python 2
    # targets run together and all Python 3 targets run together.
    # Note that Pylint uses the AST of the interpreter that runs it. So, we include any plugin
    # targets in this interpreter constraints calculation.
    interpreter_constraints_to_target_setup = defaultdict(set)
    # `request.field_sets`, `linted_targets`, and `per_target_dependencies`
    # were all derived from the same field-set iteration, so zip keeps them
    # index-aligned.
    for field_set, tgt, dependencies in zip(request.field_sets, linted_targets,
                                            per_target_dependencies):
        target_setup = PylintTargetSetup(field_set,
                                         Targets([tgt, *dependencies]))
        # NOTE(review): the comprehension variable `tgt` below shadows the
        # loop's `tgt`; `[tgt, *dependencies]` is evaluated before the
        # comprehension binds, so behavior is correct, but a rename would
        # aid readability.
        interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
            (
                *(tgt[InterpreterConstraintsField]
                  for tgt in [tgt, *dependencies]
                  if tgt.has_field(InterpreterConstraintsField)),
                *plugin_targets_compatibility_fields,
            ),
            python_setup,
        )
        interpreter_constraints_to_target_setup[interpreter_constraints].add(
            target_setup)
    # Lazily build one partition per constraint set, sorted for determinism.
    partitions = (PylintPartition(
        tuple(
            sorted(target_setups,
                   key=lambda tgt_setup: tgt_setup.field_set.address)),
        interpreter_constraints,
        Targets(plugin_targets.closure),
    ) for interpreter_constraints, target_setups in sorted(
        interpreter_constraints_to_target_setup.items()))
    partitioned_results = await MultiGet(
        Get(LintResult, PylintPartition, partition)
        for partition in partitions)
    return LintResults(partitioned_results, linter_name="Pylint")
async def pyupgrade_lint(result: PyUpgradeResult,
                         pyupgrade: PyUpgrade) -> LintResults:
    """Wrap an already-computed pyupgrade run as lint results."""
    if pyupgrade.skip:
        return LintResults([], linter_name="pyupgrade")
    lint_result = LintResult.from_fallible_process_result(
        result.process_result)
    return LintResults([lint_result], linter_name="pyupgrade")
async def docformatter_lint(request: DocformatterRequest,
                            docformatter: Docformatter) -> LintResults:
    """Check docstring formatting without rewriting any files."""
    if docformatter.skip:
        return LintResults([], linter_name="Docformatter")
    setup = await Get(Setup, SetupRequest(request, check_only=True))
    # A non-zero exit only means issues were found, so use the fallible form.
    process_result = await Get(FallibleProcessResult, Process, setup.process)
    lint_result = LintResult.from_fallible_process_result(process_result)
    return LintResults([lint_result], linter_name="Docformatter")
async def black_lint(field_sets: BlackRequest, black: Black) -> LintResults:
    """Run Black in check-only mode over the requested field sets."""
    if black.skip:
        return LintResults([], linter_name="Black")
    setup = await Get(Setup, SetupRequest(field_sets, check_only=True))
    process_result = await Get(FallibleProcessResult, Process, setup.process)
    # Strip the sandbox prefix so reported paths match the workspace.
    lint_result = LintResult.from_fallible_process_result(
        process_result, strip_chroot_path=True)
    return LintResults([lint_result], linter_name="Black")
async def pylint_lint(request: PylintRequest, pylint: Pylint) -> LintResults:
    """Split the request into partitions and lint each one concurrently."""
    if pylint.skip:
        return LintResults([], linter_name=request.name)
    partitions = await Get(PylintPartitions, PylintRequest, request)
    results = await MultiGet(
        Get(LintResult, PylintPartition, partition)
        for partition in partitions)
    return LintResults(results, linter_name=request.name)
async def yapf_lint(request: YapfRequest, yapf: Yapf) -> LintResults:
    """Run yapf in check-only mode over the requested sources."""
    if yapf.skip:
        return LintResults([], linter_name="yapf")
    setup = await Get(Setup, SetupRequest(request, check_only=True))
    process_result = await Get(FallibleProcessResult, Process, setup.process)
    lint_result = LintResult.from_fallible_process_result(process_result)
    return LintResults([lint_result], linter_name="yapf")
async def shfmt_lint(request: ShfmtRequest, shfmt: Shfmt) -> LintResults:
    """Check shell formatting with shfmt without modifying sources."""
    if shfmt.options.skip:
        return LintResults([], linter_name="shfmt")
    setup = await Get(Setup, SetupRequest(request, check_only=True))
    # `FallibleProcessResult` rather than `ProcessResult`: a failing process
    # is an expected outcome for a linter, not an error.
    process_result = await Get(FallibleProcessResult, Process, setup.process)
    lint_result = LintResult.from_fallible_process_result(process_result)
    return LintResults([lint_result], linter_name="shfmt")
async def isort_lint(request: IsortRequest, isort: Isort) -> LintResults:
    """Check import ordering with isort without modifying files."""
    if isort.skip:
        return LintResults([], linter_name="isort")
    setup = await Get(Setup, SetupRequest(request, check_only=True))
    process_result = await Get(FallibleProcessResult, Process, setup.process)
    # Strip the sandbox prefix so paths in the output match the workspace.
    lint_result = LintResult.from_fallible_process_result(
        process_result, strip_chroot_path=True)
    return LintResults([lint_result], linter_name="isort")
async def run_terraform_validate(
        request: ValidateRequest,
        subsystem: TerraformValidateSubsystem) -> LintResults:
    """Run `terraform validate` once per directory of Terraform sources."""
    if subsystem.options.skip:
        return LintResults([], linter_name="terraform validate")
    sources_files = await Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in request.field_sets),
    )
    # `terraform validate` operates on a directory-by-directory basis. First determine the directories in
    # the snapshot. This does not use `source_files_snapshot.dirs` because that will be empty if the files
    # are in a single directory.
    directories = defaultdict(list)
    for file in sources_files.snapshot.files:
        directory = os.path.dirname(file)
        if directory == "":
            # Files at the build root have an empty dirname; Terraform
            # expects "." instead.
            directory = "."
        directories[directory].append(file)
    # Then create a process for each directory.
    directory_to_process = {}
    for directory, files_in_directory in directories.items():
        args = [
            "validate",
            directory,
        ]
        # Defensive filter: drop any empty argument strings.
        args = [arg for arg in args if arg]
        process = TerraformProcess(
            args=tuple(args),
            input_digest=sources_files.snapshot.digest,
            output_files=tuple(files_in_directory),
            description=
            f"Run `terraform validate` on {pluralize(len(files_in_directory), 'file')}.",
        )
        directory_to_process[directory] = process
    # Run all per-directory processes concurrently; each may fail on its own.
    results = await MultiGet(
        Get(FallibleProcessResult, TerraformProcess, process)
        for process in directory_to_process.values())
    lint_results = []
    # dicts preserve insertion order, so keys() lines up with `results`
    # (which came from values() of the same dict).
    for directory, result in zip(directory_to_process.keys(), results):
        lint_result = LintResult(
            exit_code=result.exit_code,
            stdout=result.stdout.decode(),
            stderr=result.stderr.decode(),
            partition_description=f"`terraform validate` on `{directory}`",
        )
        lint_results.append(lint_result)
    return LintResults(lint_results, linter_name="terraform validate")
def test_streaming_output_failure() -> None:
    """A failing LintResult renders at ERROR level with its captured output."""
    results = LintResults([LintResult(18, "stdout", "stderr")],
                          linter_name="linter")
    assert results.level() == LogLevel.ERROR
    # NOTE(review): line breaks inside the expected message were lost in the
    # source's formatting; reconstruction below follows the sibling tests'
    # conventions — confirm against the original file.
    assert results.message() == dedent("""\
        linter failed (exit code 18).
        stdout
        stderr

        """)
async def tffmt_lint(request: TffmtRequest,
                     tffmt: TfFmtSubsystem) -> LintResults:
    """Check Terraform formatting, one process per directory."""
    if tffmt.options.skip:
        return LintResults([], linter_name="tffmt")
    setup = await Get(Setup, SetupRequest(request, check_only=True))
    # Each directory gets its own process; run them all concurrently.
    process_results = await MultiGet(
        Get(FallibleProcessResult, TerraformProcess, process)
        for directory, process in setup.directory_to_process.items())
    return LintResults(
        [
            LintResult.from_fallible_process_result(process_result)
            for process_result in process_results
        ],
        linter_name="tffmt",
    )
def test_streaming_output_success() -> None:
    """A zero-exit LintResult renders at INFO level with its captured output."""
    results = LintResults([LintResult(0, "stdout", "stderr")],
                          linter_name="linter")
    assert results.level() == LogLevel.INFO
    # NOTE(review): line breaks inside the expected message were lost in the
    # source's formatting; reconstruction below follows the sibling tests'
    # conventions — confirm against the original file.
    assert results.message() == dedent("""\
        linter succeeded.
        stdout
        stderr

        """)
async def docformatter_lint(
    request: DocformatterRequest, docformatter: Docformatter
) -> LintResults:
    """Check docstring formatting with Docformatter (legacy `Get[X]` API)."""
    if docformatter.options.skip:
        return LintResults()
    # check_only=True makes Docformatter report issues rather than rewrite files.
    setup = await Get[Setup](SetupRequest(request, check_only=True))
    result = await Get[FallibleProcessResult](Process, setup.process)
    return LintResults(
        [LintResult.from_fallible_process_result(result, linter_name="Docformatter")]
    )
async def pylint_lint(request: PylintRequest, pylint: Pylint,
                      python_setup: PythonSetup) -> LintResults:
    """Run Pylint (legacy `Get[X]` API), partitioned by interpreter compatibility."""
    if pylint.skip:
        return LintResults()
    # Pylint source plugins are configured as raw address strings.
    plugin_targets_request = Get[TransitiveTargets](Addresses(
        Address.parse(plugin_addr) for plugin_addr in pylint.source_plugins))
    linted_targets_request = Get[Targets](Addresses(
        field_set.address for field_set in request.field_sets))
    plugin_targets, linted_targets = cast(
        Tuple[TransitiveTargets, Targets],
        await MultiGet([plugin_targets_request, linted_targets_request]),
    )
    plugin_targets_compatibility_fields = tuple(
        plugin_tgt[PythonInterpreterCompatibility]
        for plugin_tgt in plugin_targets.closure
        if plugin_tgt.has_field(PythonInterpreterCompatibility))
    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    per_target_dependencies = await MultiGet(
        Get[Targets](DependenciesRequest(field_set.dependencies))
        for field_set in request.field_sets)
    # We batch targets by their interpreter constraints to ensure, for example, that all Python 2
    # targets run together and all Python 3 targets run together.
    interpreter_constraints_to_target_setup = defaultdict(set)
    for field_set, tgt, dependencies in zip(request.field_sets, linted_targets,
                                            per_target_dependencies):
        target_setup = PylintTargetSetup(field_set,
                                         Targets([tgt, *dependencies]))
        # NOTE(review): the comprehension variable `tgt` below shadows the
        # loop's `tgt`; `[tgt, *dependencies]` is evaluated first, so behavior
        # is correct. The `or` fallback applies the subsystem default when the
        # computed constraints are empty (falsy).
        interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
            (
                *(tgt.get(PythonInterpreterCompatibility)
                  for tgt in [tgt, *dependencies]),
                *plugin_targets_compatibility_fields,
            ),
            python_setup,
        ) or PexInterpreterConstraints(pylint.default_interpreter_constraints)
        interpreter_constraints_to_target_setup[interpreter_constraints].add(
            target_setup)
    # One partition per constraint set, sorted for determinism.
    partitions = (PylintPartition(
        tuple(
            sorted(target_setups,
                   key=lambda target_setup: target_setup.field_set.address)),
        interpreter_constraints,
        Targets(plugin_targets.closure),
    ) for interpreter_constraints, target_setups in sorted(
        interpreter_constraints_to_target_setup.items()))
    partitioned_results = await MultiGet(
        Get[LintResult](PylintPartition, partition)
        for partition in partitions)
    return LintResults(partitioned_results)
async def run_hadolint(request: HadolintRequest,
                       hadolint: Hadolint) -> LintResults:
    """Run `hadolint` over the Dockerfiles owned by ``request.field_sets``."""
    if hadolint.skip:
        return LintResults([], linter_name=request.name)
    # Fetch the tool binary and any user config concurrently.
    downloaded_hadolint, config_files = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest,
            hadolint.get_request(Platform.current)),
        Get(ConfigFiles, ConfigFilesRequest, hadolint.config_request()),
    )
    dockerfile_infos = await MultiGet(
        Get(DockerfileInfo, DockerfileInfoRequest(field_set.address))
        for field_set in request.field_sets)
    input_digest = await Get(
        Digest,
        MergeDigests((
            downloaded_hadolint.digest,
            config_files.snapshot.digest,
            *(info.digest for info in dockerfile_infos),
        )),
    )
    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                downloaded_hadolint.exe,
                *generate_argv(dockerfile_infos, hadolint)
            ],
            # Hadolint tries to read a configuration file from a few locations on the system:
            # https://github.com/hadolint/hadolint/blob/43d2bfe9f71dea9ddd203d5bdbd2cc1fb512e4dd/src/Hadolint/Config/Configfile.hs#L75-L101
            #
            # We don't want it to do this in order to have reproducible results machine to machine
            # and there is also the problem that on some machines, an unset (as opposed to empty)
            # HOME env var crashes hadolint with SIGSEGV.
            # See: https://github.com/hadolint/hadolint/issues/741
            #
            # As such, we set HOME to blank so no system configuration is found and, as a side
            # benefit, we don't crash.
            #
            # See https://github.com/pantsbuild/pants/issues/13735 for more details.
            env={"HOME": ""},
            input_digest=input_digest,
            description=
            f"Run `hadolint` on {pluralize(len(dockerfile_infos), 'Dockerfile')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResults(
        [LintResult.from_fallible_process_result(process_result)],
        linter_name=request.name)
async def scalafmt_lint(request: ScalafmtRequest,
                        tool: ScalafmtSubsystem) -> LintResults:
    """Run scalafmt in check mode, one JVM process per partition."""
    if tool.skip:
        return LintResults([], linter_name=request.name)
    setup = await Get(Setup, SetupRequest(request, check_only=True))
    process_results = await MultiGet(
        Get(FallibleProcessResult, JvmProcess, partition.process)
        for partition in setup.partitions)
    lint_results = []
    # Pair each result back up with the partition it came from.
    for process_result, partition in zip(process_results, setup.partitions):
        lint_results.append(
            LintResult.from_fallible_process_result(
                process_result,
                partition_description=partition.description))
    return LintResults(lint_results, linter_name=request.name)
async def gofmt_lint(request: GofmtRequest,
                     gofmt: GofmtSubsystem) -> LintResults:
    """Run gofmt in check mode and fail when any file needs reformatting."""
    if gofmt.options.skip:
        return LintResults([], linter_name="gofmt")
    setup = await Get(Setup, SetupRequest(request, check_only=True))
    process_result = await Get(FallibleProcessResult, Process, setup.process)
    lint_result = LintResult.from_fallible_process_result(process_result)
    # gofmt exits 0 even when it would have reformatted files; in check mode
    # any stdout therefore means a formatting failure.
    needs_formatting = lint_result.stdout.strip() != ""
    if lint_result.exit_code == 0 and needs_formatting:
        lint_result = dataclasses.replace(
            lint_result,
            exit_code=1,
            stdout=
            f"The following Go files require formatting:\n{lint_result.stdout}\n",
        )
    return LintResults([lint_result], linter_name="gofmt")
async def run_helm_lint(request: HelmLintRequest,
                        helm_subsystem: HelmSubsystem) -> LintResults:
    """Run `helm lint` on each requested chart.

    Charts whose field set opts out via `skip_lint` are excluded entirely.
    """
    # Filter once and reuse the filtered list everywhere below. Previously
    # `charts` (built only from non-skipped field sets) was zipped against the
    # FULL `request.field_sets`, so whenever any target set `skip_lint`, charts
    # were paired with the wrong field sets (wrong `--strict` flags).
    relevant_field_sets = [
        field_set for field_set in request.field_sets
        if not field_set.skip_lint.value
    ]
    charts = await MultiGet(
        Get(HelmChart, HelmChartRequest(field_set))
        for field_set in relevant_field_sets)
    logger.debug(f"Linting {pluralize(len(charts), 'chart')}...")

    def create_process(chart: HelmChart,
                       field_set: HelmLintFieldSet) -> HelmProcess:
        # Build one `helm lint` invocation for a single chart.
        argv = ["lint", chart.path]
        strict: bool = field_set.lint_strict.value or helm_subsystem.lint_strict
        if strict:
            argv.append("--strict")
        return HelmProcess(
            argv,
            input_digest=chart.snapshot.digest,
            description=f"Linting chart: {chart.metadata.name}",
        )

    # `charts` and `relevant_field_sets` are index-aligned by construction.
    process_results = await MultiGet(
        Get(
            FallibleProcessResult,
            HelmProcess,
            create_process(chart, field_set),
        ) for chart, field_set in zip(charts, relevant_field_sets))
    results = [
        LintResult.from_fallible_process_result(
            process_result, partition_description=chart.metadata.name)
        for chart, process_result in zip(charts, process_results)
    ]
    return LintResults(results, linter_name=request.name)
async def flake8_lint(request: Flake8Request, flake8: Flake8,
                      python_setup: PythonSetup) -> LintResults:
    """Batch field sets by interpreter constraints and lint each batch.

    NB: Flake8 output depends upon which Python interpreter version it's run
    with (http://flake8.pycqa.org/en/latest/user/invocation.html), so all
    Python 2 targets run together and all Python 3 targets run together.
    """
    if flake8.skip:
        return LintResults([], linter_name="Flake8")
    partitions = InterpreterConstraints.group_field_sets_by_constraints(
        request.field_sets, python_setup)
    results = await MultiGet(
        Get(LintResult, Flake8Partition(field_sets, constraints))
        for constraints, field_sets in partitions.items())
    return LintResults(results, linter_name="Flake8")
async def bandit_lint(request: BanditRequest, bandit: Bandit,
                      python_setup: PythonSetup) -> LintResults:
    """Batch field sets by interpreter constraints and run Bandit per batch.

    NB: Bandit output depends upon which Python interpreter version it's run
    with (https://github.com/PyCQA/bandit#under-which-version-of-python-should-i-install-bandit),
    so all Python 2 targets run together and all Python 3 targets run together.
    """
    if bandit.skip:
        return LintResults([], linter_name="Bandit")
    partitions = InterpreterConstraints.group_field_sets_by_constraints(
        request.field_sets, python_setup)
    results = await MultiGet(
        Get(LintResult, BanditPartition(field_sets, constraints))
        for constraints, field_sets in partitions.items())
    return LintResults(results, linter_name="Bandit")
async def run_go_vet(request: GoVetRequest,
                     go_vet_subsystem: GoVetSubsystem) -> LintResults:
    """Run `go vet` over the packages containing the requested sources."""
    if go_vet_subsystem.skip:
        return LintResults([], linter_name=request.name)
    source_files = await Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in request.field_sets),
    )
    # Find each target's owning go.mod so its digest can be merged into the
    # sandbox; deduplicate since many targets share one module.
    owning_go_mods = await MultiGet(
        Get(OwningGoMod, OwningGoModRequest(field_set.address))
        for field_set in request.field_sets)
    owning_go_mod_addresses = {x.address for x in owning_go_mods}
    go_mod_infos = await MultiGet(
        Get(GoModInfo, GoModInfoRequest(address))
        for address in owning_go_mod_addresses)
    input_digest = await Get(
        Digest,
        MergeDigests([
            source_files.snapshot.digest,
            *(info.digest for info in set(go_mod_infos))
        ]),
    )
    # `go vet` operates per package: vet each distinct source directory,
    # sorted for a deterministic argv.
    package_dirs = sorted(
        {os.path.dirname(f) for f in source_files.snapshot.files})
    process_result = await Get(
        FallibleProcessResult,
        GoSdkProcess(
            ("vet", *(f"./{p}" for p in package_dirs)),
            input_digest=input_digest,
            description=
            f"Run `go vet` on {pluralize(len(source_files.snapshot.files), 'file')}.",
        ),
    )
    result = LintResult.from_fallible_process_result(process_result)
    return LintResults([result], linter_name=request.name)
def test_streaming_output_partitions() -> None:
    """Partitioned results render per-partition sections; one failure makes
    the aggregate an ERROR."""
    results = LintResults(
        [
            LintResult(21, "", "", partition_description="ghc8.1"),
            LintResult(0, "stdout", "stderr", partition_description="ghc9.2"),
        ],
        linter_name="linter",
    )
    assert results.level() == LogLevel.ERROR
    # NOTE(review): blank-line placement in the expected message was lost in
    # the source's formatting; reconstruction below is a best effort — confirm
    # against the original file.
    assert results.message() == dedent("""\
        linter failed (exit code 21).
        Partition #1 - ghc8.1:

        Partition #2 - ghc9.2:
        stdout
        stderr

        """)
async def google_java_format_lint(
        request: GoogleJavaFormatRequest,
        tool: GoogleJavaFormatSubsystem) -> LintResults:
    """Run Google Java Format in check mode; fail when files need changes."""
    if tool.skip:
        return LintResults([], linter_name=request.name)
    setup = await Get(Setup, SetupRequest(request, check_only=True))
    process_result = await Get(FallibleProcessResult, JvmProcess,
                               setup.process)
    lint_result = LintResult.from_fallible_process_result(process_result)
    # The formatter exits 0 even when it would reformat files, so in check
    # mode any stdout signals a formatting failure.
    needs_formatting = lint_result.stdout.strip() != ""
    if lint_result.exit_code == 0 and needs_formatting:
        lint_result = dataclasses.replace(
            lint_result,
            exit_code=1,
            stdout=
            f"The following Java files require formatting:\n{lint_result.stdout}\n",
        )
    return LintResults([lint_result], linter_name=request.name)
async def autoflake_lint(request: AutoflakeRequest,
                         autoflake: Autoflake) -> LintResults:
    """Run autoflake in check mode, filtering its success chatter."""
    if autoflake.skip:
        return LintResults([], linter_name="autoflake")
    setup = await Get(Setup, SetupRequest(request, check_only=True))
    process_result = await Get(FallibleProcessResult, Process, setup.process)

    def strip_check_result(output: str) -> str:
        # Drop autoflake's "No issues detected!" lines so clean runs have
        # empty stdout.
        kept = [
            line for line in output.splitlines()
            if line != "No issues detected!"
        ]
        return "\n".join(kept)

    lint_result = LintResult(
        process_result.exit_code,
        strip_check_result(process_result.stdout.decode()),
        process_result.stderr.decode(),
    )
    return LintResults([lint_result], linter_name="autoflake")
def lint_result(self) -> LintResults:
    """Build the stubbed LintResults for this mock linter's field sets."""
    addresses = [field_set.address for field_set in self.field_sets]
    result = LintResult(
        self.exit_code(addresses),
        self.stdout(addresses),
        "",
        linter_name=self.linter_name,
    )
    return LintResults([result])
async def run_hadolint(request: HadolintRequest,
                       hadolint: Hadolint) -> LintResults:
    """Run `hadolint` over the Dockerfile sources in ``request.field_sets``.

    Fix: the skip path previously returned ``linter_name="Hadolint"`` while
    the result path used ``"hadolint"``; both now use the lowercase name the
    actual results report, so output grouping/display is consistent.
    """
    if hadolint.skip:
        return LintResults([], linter_name="hadolint")
    # Resolve the tool download, Dockerfile sources, and config concurrently.
    downloaded_hadolint, sources, config_files = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest,
            hadolint.get_request(Platform.current)),
        Get(
            SourceFiles,
            SourceFilesRequest(
                [field_set.sources for field_set in request.field_sets],
                for_sources_types=(DockerImageSources, ),
                enable_codegen=True,
            ),
        ),
        Get(ConfigFiles, ConfigFilesRequest, hadolint.config_request()),
    )
    input_digest = await Get(
        Digest,
        MergeDigests((
            sources.snapshot.digest,
            downloaded_hadolint.digest,
            config_files.snapshot.digest,
        )),
    )
    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[downloaded_hadolint.exe,
                  *generate_argv(sources, hadolint)],
            input_digest=input_digest,
            description=
            f"Run `hadolint` on {pluralize(len(sources.files), 'Dockerfile')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResults(
        [LintResult.from_fallible_process_result(process_result)],
        linter_name="hadolint")
def lint_results(self) -> LintResults:
    """Return empty results, representing a linter that was skipped."""
    return LintResults([], linter_name="SkippedLinter")