Example #1
async def create_python_binary(
        config: PythonBinaryConfiguration) -> CreatedBinary:
    entry_point: Optional[str]
    if config.entry_point.value is not None:
        entry_point = config.entry_point.value
    else:
        source_files = await Get[SourceFiles](AllSourceFilesRequest(
            [config.sources], strip_source_roots=True))
        # NB: `PythonBinarySources` enforces that we have 0-1 sources.
        if len(source_files.files) == 1:
            module_name = source_files.files[0]
            entry_point = PythonBinary.translate_source_path_to_py_module_specifier(
                module_name)
        else:
            entry_point = None

    output_filename = f"{config.address.target_name}.pex"
    two_step_pex = await Get[TwoStepPex](TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=Addresses([config.address]),
            entry_point=entry_point,
            output_filename=output_filename,
            additional_args=config.generate_additional_args(),
            description=f"Building {output_filename}",
        )))
    pex = two_step_pex.pex
    return CreatedBinary(digest=pex.directory_digest,
                         binary_name=pex.output_filename)
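In Example #1 the entry point is derived from the single source file via PythonBinary.translate_source_path_to_py_module_specifier. A minimal sketch of what such a translation amounts to, assuming source roots have already been stripped (the real method may differ in details):

import os


def translate_source_path_to_py_module_specifier(source_path: str) -> str:
    # Drop the ".py" extension and turn path separators into dots,
    # e.g. "helloworld/main.py" -> "helloworld.main".
    module_base, _ = os.path.splitext(source_path)
    return module_base.replace(os.path.sep, ".")


assert translate_source_path_to_py_module_specifier("helloworld/main.py") == "helloworld.main"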
Example #2
async def create_python_binary(
        python_binary_adaptor: PythonBinaryAdaptor) -> CreatedBinary:
    # TODO(#8420) This way of calculating the entry point works but is a bit hackish.
    if hasattr(python_binary_adaptor, "entry_point"):
        entry_point = python_binary_adaptor.entry_point
    else:
        sources = await Get[SourceFiles](AllSourceFilesRequest(
            [python_binary_adaptor], strip_source_roots=True))
        # NB: `python_binary` targets may have 0-1 sources. This is enforced by
        # `PythonBinaryAdaptor`.
        if len(sources.snapshot.files) == 1:
            module_name = sources.snapshot.files[0]
            entry_point = PythonBinary.translate_source_path_to_py_module_specifier(
                module_name)
        else:
            entry_point = None

    request = CreatePexFromTargetClosure(
        addresses=Addresses((python_binary_adaptor.address, )),
        entry_point=entry_point,
        output_filename=f"{python_binary_adaptor.address.target_name}.pex",
    )

    pex = await Get[Pex](CreatePexFromTargetClosure, request)
    return CreatedBinary(digest=pex.directory_digest,
                         binary_name=pex.output_filename)
Example #3
async def setup(
    request: SetupRequest,
    isort: Isort,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    adaptors_with_origins = request.formatter.adaptors_with_origins

    requirements_pex = await Get[Pex](CreatePex(
        output_filename="isort.pex",
        requirements=PexRequirements(isort.get_requirement_specs()),
        interpreter_constraints=PexInterpreterConstraints(
            isort.default_interpreter_constraints),
        entry_point=isort.get_entry_point(),
    ))

    config_path: Optional[List[str]] = isort.options.config
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=config_path or (),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        conjunction=GlobExpansionConjunction.all_match,
        description_of_origin="the option `--isort-config`",
    ))

    if request.formatter.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](AllSourceFilesRequest(
            adaptor_with_origin.adaptor
            for adaptor_with_origin in adaptors_with_origins))
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.formatter.prior_formatter_result

    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(adaptors_with_origins))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files_snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )), )

    address_references = ", ".join(
        sorted(adaptor_with_origin.adaptor.address.reference()
               for adaptor_with_origin in adaptors_with_origins))

    process_request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./isort.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            isort=isort,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=f"Run isort for {address_references}",
    )
    return Setup(process_request)
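The generate_args helper used above is not shown in this listing. A plausible sketch of its shape; the exact flags and option handling are assumptions, not the real implementation:

from typing import Tuple


def generate_args(*, specified_source_files: SourceFiles, isort: Isort,
                  check_only: bool) -> Tuple[str, ...]:
    # Assumed behavior: pass --check-only when only verifying, then any user-supplied
    # isort args, then the specific files selected on the command line.
    args = []
    if check_only:
        args.append("--check-only")
    args.extend(isort.options.args)
    args.extend(sorted(specified_source_files.snapshot.files))
    return tuple(args)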
Example #4
async def merge_coverage_data(
    data_batch: PytestCoverageDataBatch,
    transitive_targets: TransitiveHydratedTargets,
    python_setup: PythonSetup,
    coverage_setup: CoverageSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> MergedCoverageData:
    """Takes all python test results and merges their coverage data into a single sql file."""
    # We start with a bunch of test results, each of which has a coverage data file called `.coverage`
    # We prefix each of these with their address so that we can write them all into a single pex.
    coverage_directory_digests = await MultiGet(
        Get[Digest](
            DirectoryWithPrefixToAdd(
                directory_digest=result.test_result.coverage_data.
                digest,  # type: ignore[attr-defined]
                prefix=result.address.path_safe_spec,
            )) for result in data_batch.addresses_and_test_results
        if result.test_result is not None
        and result.test_result.coverage_data is not None)
    sources = await Get[SourceFiles](AllSourceFilesRequest(
        (ht.adaptor for ht in transitive_targets.closure),
        strip_source_roots=False))
    sources_with_inits_snapshot = await Get[InitInjectedSnapshot](
        InjectInitRequest(sources.snapshot))
    coveragerc = await Get[Coveragerc](
        CoveragercRequest(HydratedTargets(transitive_targets.closure), test_time=True))
    merged_input_files: Digest = await Get(
        Digest,
        DirectoriesToMerge(directories=(
            *coverage_directory_digests,
            sources_with_inits_snapshot.snapshot.directory_digest,
            coveragerc.digest,
            coverage_setup.requirements_pex.directory_digest,
        )),
    )

    prefixes = [
        f"{result.address.path_safe_spec}/.coverage"
        for result in data_batch.addresses_and_test_results
    ]
    coverage_args = ["combine", *prefixes]
    request = coverage_setup.requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{coverage_setup.requirements_pex.output_filename}",
        pex_args=coverage_args,
        input_files=merged_input_files,
        output_files=(".coverage", ),
        description=f"Merge coverage reports.",
    )

    result = await Get[ExecuteProcessResult](ExecuteProcessRequest, request)
    return MergedCoverageData(coverage_data=result.output_directory_digest)
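Each test result's .coverage file is placed under a prefix derived from its target's path-safe spec before merging, so the final `coverage combine` invocation receives one distinct path per target. A toy sketch of the argument construction, using hypothetical path-safe specs:

# Hypothetical stand-ins for result.address.path_safe_spec values.
path_safe_specs = ["src.python.project.tests", "src.python.util.tests"]

prefixes = [f"{spec}/.coverage" for spec in path_safe_specs]
coverage_args = ["combine", *prefixes]
# -> ["combine", "src.python.project.tests/.coverage", "src.python.util.tests/.coverage"]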
Example #5
async def generate_coverage_report(
    transitive_targets: TransitiveHydratedTargets,
    python_setup: PythonSetup,
    coverage_setup: CoverageSetup,
    merged_coverage_data: MergedCoverageData,
    coverage_toolbase: PytestCoverage,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> CoverageReport:
    """Takes all python test results and generates a single coverage report."""
    requirements_pex = coverage_setup.requirements_pex
    # TODO(#4535) We need a better way to do this kind of check that covers synthetic targets and rules extensibility.
    python_targets = [
        target for target in transitive_targets.closure
        if target.adaptor.type_alias in ("python_library", "python_tests")
    ]

    coveragerc = await Get[Coveragerc](CoveragercRequest(
        HydratedTargets(python_targets)))
    sources = await Get[SourceFiles](AllSourceFilesRequest(
        (ht.adaptor for ht in transitive_targets.closure),
        strip_source_roots=False))
    sources_with_inits_snapshot = await Get[InitInjectedSnapshot](
        InjectInitRequest(sources.snapshot))
    merged_input_files: Digest = await Get(
        Digest,
        DirectoriesToMerge(directories=(
            merged_coverage_data.coverage_data,
            coveragerc.digest,
            requirements_pex.directory_digest,
            sources_with_inits_snapshot.snapshot.directory_digest,
        )),
    )
    report_type = coverage_toolbase.options.report
    coverage_args = [report_type.report_name]
    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{coverage_setup.requirements_pex.output_filename}",
        pex_args=coverage_args,
        input_files=merged_input_files,
        output_directories=("htmlcov", ),
        output_files=("coverage.xml", ),
        description=f"Generate coverage report.",
    )

    result = await Get[ExecuteProcessResult](ExecuteProcessRequest, request)
    if report_type == ReportType.CONSOLE:
        return ConsoleCoverageReport(result.stdout.decode())

    return FilesystemCoverageReport(
        result.output_directory_digest,
        coverage_toolbase.options.report_output_path)
Example #6
async def setup(
    request: SetupRequest,
    docformatter: Docformatter,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    adaptors_with_origins = request.formatter.adaptors_with_origins

    requirements_pex = await Get[Pex](CreatePex(
        output_filename="docformatter.pex",
        requirements=PexRequirements(docformatter.get_requirement_specs()),
        interpreter_constraints=PexInterpreterConstraints(
            docformatter.default_interpreter_constraints),
        entry_point=docformatter.get_entry_point(),
    ))

    if request.formatter.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](AllSourceFilesRequest(
            adaptor_with_origin.adaptor
            for adaptor_with_origin in adaptors_with_origins))
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.formatter.prior_formatter_result

    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(adaptors_with_origins))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files_snapshot.directory_digest,
        requirements_pex.directory_digest,
    )), )

    address_references = ", ".join(
        sorted(adaptor_with_origin.adaptor.address.reference()
               for adaptor_with_origin in adaptors_with_origins))

    process_request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./docformatter.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            docformatter=docformatter,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=f"Run docformatter for {address_references}",
    )
    return Setup(process_request)
Example #7
async def prepare_chrooted_python_sources(
    hydrated_targets: HydratedTargets, ) -> ChrootedPythonSources:
    """Prepares Python sources by stripping the source root and injecting missing __init__.py files.

    NB: This is useful for Pytest or ./pants run, but not every Python rule will need this.
    For example, autoformatters like Black do not need to understand relative imports or
    execute the code, so they can safely operate on the original source files without
    stripping source roots.
    """
    stripped_sources = await Get[SourceFiles](AllSourceFilesRequest(
        (ht.adaptor for ht in hydrated_targets), strip_source_roots=True))
    init_injected = await Get[InitInjectedSnapshot](InjectInitRequest(
        stripped_sources.snapshot))
    return ChrootedPythonSources(init_injected.snapshot)
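As a rough illustration of what "stripping the source root" means for the chroot layout (the engine does this on digests rather than strings), assuming a source root of src/python:

def strip_source_root(path: str, source_root: str = "src/python") -> str:
    # "src/python/project/util.py" -> "project/util.py"
    prefix = source_root + "/"
    return path[len(prefix):] if path.startswith(prefix) else path


assert strip_source_root("src/python/project/util.py") == "project/util.py"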
Example #8
 def get_all_source_files(
     self,
     adaptors_with_origins: Iterable[TargetAdaptorWithOrigin],
     *,
     strip_source_roots: bool = False,
 ) -> List[str]:
     request = AllSourceFilesRequest(
         (adaptor_with_origin.adaptor
          for adaptor_with_origin in adaptors_with_origins),
         strip_source_roots=strip_source_roots,
     )
     result = self.request_single_product(
         SourceFiles, Params(request, create_options_bootstrapper()))
     return sorted(result.snapshot.files)
Example #9
 def get_all_source_files(
     self,
     sources_fields_with_origins: Iterable[Tuple[SourcesField, OriginSpec]],
     *,
     strip_source_roots: bool = False,
 ) -> List[str]:
     request = AllSourceFilesRequest(
         (
             sources_field_with_origin[0]
             for sources_field_with_origin in sources_fields_with_origins
         ),
         strip_source_roots=strip_source_roots,
     )
     result = self.request_single_product(
         SourceFiles, Params(request, create_options_bootstrapper())
     )
     return sorted(result.snapshot.files)
Example #10
async def construct_coverage_config(
        source_root_config: SourceRootConfig,
        coverage_config_request: CoveragercRequest) -> Coveragerc:
    sources = await Get[SourceFiles](AllSourceFilesRequest(
        (ht.adaptor for ht in coverage_config_request.hydrated_targets),
        strip_source_roots=False,
    ))
    init_injected = await Get[InitInjectedSnapshot](InjectInitRequest(
        sources.snapshot))
    source_roots = source_root_config.get_source_roots()

    # Generate a map from source root stripped source to its source root. eg:
    #  {'pants/testutil/subsystem/util.py': 'src/python'}
    # This is so that a report entry like /chroot/path/pants/testutil/subsystem/util.py can be mapped
    # back to the actual source file it refers to when the coverage report is generated.
    def source_root_stripped_source_and_source_root(
            file_name: str) -> Tuple[str, str]:
        source_root = source_roots.find_by_path(file_name)
        source_root_path = source_root.path if source_root is not None else ""
        source_root_stripped_path = file_name[len(source_root_path) + 1:]
        return (source_root_stripped_path, source_root_path)

    source_to_target_base = dict(
        source_root_stripped_source_and_source_root(filename)
        for filename in sorted(init_injected.snapshot.files))
    config_parser = configparser.ConfigParser()
    config_parser.read_file(StringIO(DEFAULT_COVERAGE_CONFIG))
    ensure_section(config_parser, "run")
    config_parser.set("run", "plugins", COVERAGE_PLUGIN_MODULE_NAME)
    config_parser.add_section(COVERAGE_PLUGIN_MODULE_NAME)
    config_parser.set(COVERAGE_PLUGIN_MODULE_NAME, "source_to_target_base",
                      json.dumps(source_to_target_base))
    config_parser.set(COVERAGE_PLUGIN_MODULE_NAME, "test_time",
                      json.dumps(coverage_config_request.test_time))
    config = StringIO()
    config_parser.write(config)
    coveragerc_digest = await Get[Digest](
        InputFilesContent, get_coveragerc_input(config.getvalue()))
    return Coveragerc(coveragerc_digest)
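To see the shape of the .coveragerc that Example #10 produces, the configparser manipulation can be run standalone. The plugin module name and the source-to-source-root mapping below are placeholders, not the real values:

import configparser
import json
from io import StringIO

COVERAGE_PLUGIN_MODULE_NAME = "example_coverage_plugin"  # hypothetical placeholder

config_parser = configparser.ConfigParser()
config_parser.add_section("run")
config_parser.set("run", "plugins", COVERAGE_PLUGIN_MODULE_NAME)
config_parser.add_section(COVERAGE_PLUGIN_MODULE_NAME)
config_parser.set(COVERAGE_PLUGIN_MODULE_NAME, "source_to_target_base",
                  json.dumps({"pants/testutil/subsystem/util.py": "src/python"}))
config_parser.set(COVERAGE_PLUGIN_MODULE_NAME, "test_time", json.dumps(True))

config = StringIO()
config_parser.write(config)
print(config.getvalue())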
Example #11
async def format_python_target(
        python_formatters: PythonFormatters,
        union_membership: UnionMembership) -> LanguageFmtResults:
    adaptors_with_origins = python_formatters.adaptors_with_origins
    original_sources = await Get[SourceFiles](AllSourceFilesRequest(
        adaptor_with_origin.adaptor
        for adaptor_with_origin in adaptors_with_origins))
    prior_formatter_result = original_sources.snapshot

    results: List[FmtResult] = []
    formatters: Iterable[
        Type[PythonFormatter]] = union_membership.union_rules[PythonFormatter]
    for formatter in formatters:
        result = await Get[FmtResult](
            PythonFormatter,
            formatter(adaptors_with_origins,
                      prior_formatter_result=prior_formatter_result),
        )
        if result != FmtResult.noop():
            results.append(result)
            prior_formatter_result = await Get[Snapshot](Digest, result.digest)
    return LanguageFmtResults(
        tuple(results),
        combined_digest=prior_formatter_result.directory_digest)
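The loop in Example #11 threads each formatter's output snapshot into the next formatter, so formatters compose instead of each reformatting the original sources. The same chaining pattern, stripped of the engine types; the Formatter callables here are hypothetical stand-ins:

from typing import Callable, Sequence

Snapshot = str  # stand-in for the engine's Snapshot type
Formatter = Callable[[Snapshot], Snapshot]


def run_formatters(original: Snapshot, formatters: Sequence[Formatter]) -> Snapshot:
    prior = original
    for fmt in formatters:
        # Each formatter receives the previous formatter's output as its input.
        prior = fmt(prior)
    return prior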
Example #12
async def lint(
    linter: Flake8Linter,
    flake8: Flake8,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    if flake8.options.skip:
        return LintResult.noop()

    adaptors_with_origins = linter.adaptors_with_origins

    # NB: Flake8 output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://flake8.pycqa.org/en/latest/user/invocation.html.
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        (adaptor_with_origin.adaptor
         for adaptor_with_origin in adaptors_with_origins),
        python_setup=python_setup,
    )
    requirements_pex = await Get[Pex](CreatePex(
        output_filename="flake8.pex",
        requirements=PexRequirements(flake8.get_requirement_specs()),
        interpreter_constraints=interpreter_constraints,
        entry_point=flake8.get_entry_point(),
    ))

    config_path: Optional[str] = flake8.options.config
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=tuple([config_path] if config_path else []),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--flake8-config`",
    ))

    all_source_files = await Get[SourceFiles](AllSourceFilesRequest(
        adaptor_with_origin.adaptor
        for adaptor_with_origin in adaptors_with_origins))
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(adaptors_with_origins))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files.snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )), )

    address_references = ", ".join(
        sorted(adaptor_with_origin.adaptor.address.reference()
               for adaptor_with_origin in adaptors_with_origins))

    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./flake8.pex",
        pex_args=generate_args(specified_source_files=specified_source_files,
                               flake8=flake8),
        input_files=merged_input_files,
        description=f"Run Flake8 for {address_references}",
    )
    result = await Get[FallibleExecuteProcessResult](ExecuteProcessRequest,
                                                     request)
    return LintResult.from_fallible_execute_process_result(result)
Example #13
async def prepare_python_sources(targets: Targets) -> ImportablePythonSources:
    stripped_sources = await Get[SourceFiles](AllSourceFilesRequest(
        (tgt.get(Sources) for tgt in targets), strip_source_roots=True))
    init_injected = await Get[InitInjectedSnapshot](InjectInitRequest(
        stripped_sources.snapshot))
    return ImportablePythonSources(init_injected.snapshot)