Code example #1
def create_python_binary(python_binary_adaptor: PythonBinaryAdaptor,
                         python_setup: PythonSetup) -> CreatedBinary:
    transitive_hydrated_targets = yield Get(
        TransitiveHydratedTargets,
        BuildFileAddresses((python_binary_adaptor.address, )))
    all_targets = transitive_hydrated_targets.closure
    all_target_adaptors = [t.adaptor for t in all_targets]

    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=tuple(all_targets), python_setup=python_setup)

    source_root_stripped_sources = yield [
        Get(SourceRootStrippedSources, HydratedTarget, target_adaptor)
        for target_adaptor in all_targets
    ]

    # TODO(#8420): This way of calculating the entry point works but is a bit hackish.
    entry_point = None
    if hasattr(python_binary_adaptor, 'entry_point'):
        entry_point = python_binary_adaptor.entry_point
    else:
        sources_snapshot = python_binary_adaptor.sources.snapshot
        if len(sources_snapshot.files) == 1:
            target = transitive_hydrated_targets.roots[0]
            output = yield Get(SourceRootStrippedSources, HydratedTarget,
                               target)
            root_filename = output.snapshot.files[0]
            entry_point = PythonBinary.translate_source_path_to_py_module_specifier(
                root_filename)

    stripped_sources_digests = [
        stripped_sources.snapshot.directory_digest
        for stripped_sources in source_root_stripped_sources
    ]
    sources_digest = yield Get(
        Digest,
        DirectoriesToMerge(directories=tuple(stripped_sources_digests)))
    inits_digest = yield Get(InjectedInitDigest, Digest, sources_digest)
    all_input_digests = [sources_digest, inits_digest.directory_digest]
    merged_input_files = yield Get(
        Digest, DirectoriesToMerge,
        DirectoriesToMerge(directories=tuple(all_input_digests)))

    requirements = PexRequirements.create_from_adaptors(all_target_adaptors)
    output_filename = f"{python_binary_adaptor.address.target_name}.pex"

    create_requirements_pex = CreatePex(
        output_filename=output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        entry_point=entry_point,
        input_files_digest=merged_input_files,
    )

    pex = yield Get(Pex, CreatePex, create_requirements_pex)
    yield CreatedBinary(digest=pex.directory_digest,
                        binary_name=pex.output_filename)
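
For orientation: every example in this gallery reduces to the same pattern, whichever generation of the rule API it uses. Collect the `Digest` of each input tree, merge them with `DirectoriesToMerge`, and pass the merged `Digest` on as a single input tree, either to materialize it or to feed a process. Below is a minimal sketch of just that step in the newer `await` syntax most examples use (Pants imports are omitted as in the surrounding examples, since their module paths moved between the releases shown; older examples such as #1 make the same request with `yield Get(...)`):

from typing import Tuple

async def merge_snapshots(snapshots: Tuple[Snapshot, ...]) -> Digest:
    # Merge every input tree into one Digest. Merging fails on conflicting
    # file contents rather than silently picking a winner, which the fmt
    # rules below rely on.
    return await Get[Digest](
        DirectoriesToMerge(
            directories=tuple(s.directory_digest for s in snapshots)))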
Code example #2
async def create_binary(
    console: Console,
    workspace: Workspace,
    options: BinaryOptions,
    distdir: DistDir,
    buildroot: BuildRoot,
) -> Binary:
    targets_to_valid_configs = await Get[TargetsToValidConfigurations](
        TargetsToValidConfigurationsRequest(
            BinaryConfiguration,
            goal_description=f"the `{options.name}` goal",
            error_if_no_valid_targets=True,
        ))
    binaries = await MultiGet(
        Get[CreatedBinary](BinaryConfiguration, config)
        for config in targets_to_valid_configs.configurations)
    merged_digest = await Get[Digest](DirectoriesToMerge(
        tuple(binary.digest for binary in binaries)))
    result = workspace.materialize_directory(
        DirectoryToMaterialize(merged_digest,
                               path_prefix=str(distdir.relpath)))
    with options.line_oriented(console) as print_stdout:
        for path in result.output_paths:
            print_stdout(f"Wrote {os.path.relpath(path, buildroot.path)}")
    return Binary(exit_code=0)
Code example #3
async def create_binary(addresses: BuildFileAddresses, console: Console,
                        workspace: Workspace, options: Binary.Options,
                        options_bootstrapper: OptionsBootstrapper,
                        build_root: BuildRoot) -> Binary:
    with Binary.line_oriented(options, console) as print_stdout:
        global_options = options_bootstrapper.bootstrap_options.for_global_scope()
        pants_distdir = Path(global_options.pants_distdir)
        if not is_child_of(pants_distdir, build_root.pathlib_path):
            console.print_stderr(
                f"When set to an absolute path, `--pants-distdir` must be a child of the build "
                f"root. You set it to {pants_distdir}. Instead, use a relative path, or an "
                "absolute path that lies under the build root."
            )
            return Binary(exit_code=1)

        relative_distdir = (
            pants_distdir.relative_to(build_root.pathlib_path)
            if pants_distdir.is_absolute()
            else pants_distdir
        )
        print_stdout(f"Generating binaries in `./{relative_distdir}`")

        binaries = await MultiGet(
            Get[CreatedBinary](Address, address.to_address())
            for address in addresses)
        merged_digest = await Get[Digest](DirectoriesToMerge(
            tuple(binary.digest for binary in binaries)))
        result = workspace.materialize_directory(
            DirectoryToMaterialize(merged_digest,
                                   path_prefix=str(relative_distdir)))
        for path in result.output_paths:
            print_stdout(f"Wrote {path}")
    return Binary(exit_code=0)
Code example #4
async def create_binary(
    targets_with_origins: TargetsWithOrigins,
    console: Console,
    workspace: Workspace,
    options: BinaryOptions,
    distdir: DistDir,
    buildroot: BuildRoot,
    union_membership: UnionMembership,
    registered_target_types: RegisteredTargetTypes,
) -> Binary:
    valid_config_types_by_target = gather_valid_binary_configuration_types(
        goal_subsytem=options,
        targets_with_origins=targets_with_origins,
        union_membership=union_membership,
        registered_target_types=registered_target_types,
    )
    binaries = await MultiGet(
        Get[CreatedBinary](BinaryConfiguration, valid_config_type.create(
            target))
        for target, valid_config_types in valid_config_types_by_target.items()
        for valid_config_type in valid_config_types)
    merged_digest = await Get[Digest](DirectoriesToMerge(
        tuple(binary.digest for binary in binaries)))
    result = workspace.materialize_directory(
        DirectoryToMaterialize(merged_digest,
                               path_prefix=str(distdir.relpath)))
    with options.line_oriented(console) as print_stdout:
        for path in result.output_paths:
            print_stdout(f"Wrote {os.path.relpath(path, buildroot.path)}")
    return Binary(exit_code=0)
Code example #5
File: rules.py (project: tgodzik/pants)
async def setup_black(wrapped_target: FormattablePythonTarget, black: Black) -> BlackSetup:
  config_path: Optional[str] = black.get_options().config
  config_snapshot = await Get(Snapshot, PathGlobs(include=(config_path,)))
  resolved_requirements_pex = await Get(
    Pex, CreatePex(
      output_filename="black.pex",
      requirements=PexRequirements(requirements=tuple(black.get_requirement_specs())),
      interpreter_constraints=PexInterpreterConstraints(
        constraint_set=tuple(black.default_interpreter_constraints)
      ),
      entry_point=black.get_entry_point(),
    )
  )

  sources_digest = wrapped_target.target.sources.snapshot.directory_digest

  merged_input_files = await Get(
    Digest,
    DirectoriesToMerge(
      directories=(
        sources_digest,
        resolved_requirements_pex.directory_digest,
        config_snapshot.directory_digest,
      )
    ),
  )
  return BlackSetup(config_path, resolved_requirements_pex, merged_input_files)
Code example #6
File: rules.py (project: grimreaper/pants)
async def create_black_request(
  wrapped_target: BlackTarget,
  black_args: BlackArgs,
  black_setup: BlackSetup,
  python_setup: PythonSetup,
  subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> ExecuteProcessRequest:
  target = wrapped_target.target
  sources_digest = wrapped_target.prior_formatter_result_digest or target.sources.snapshot.directory_digest
  merged_input_files = await Get[Digest](
    DirectoriesToMerge(
      directories=(
        sources_digest,
        black_setup.requirements_pex.directory_digest,
        black_setup.config_snapshot.directory_digest,
      )
    ),
  )
  return black_setup.requirements_pex.create_execute_request(
      python_setup=python_setup,
      subprocess_encoding_environment=subprocess_encoding_environment,
      pex_path="./black.pex",
      pex_args=black_args.args,
      input_files=merged_input_files,
      output_files=target.sources.snapshot.files,
      description=f'Run Black for {target.address.reference()}',
  )
Code example #7
 def run_isort(
     self,
     targets: List[TargetAdaptorWithOrigin],
     *,
     config: Optional[str] = None,
     passthrough_args: Optional[str] = None,
     skip: bool = False,
 ) -> Tuple[LintResult, FmtResult]:
     args = ["--backend-packages2=pants.backend.python.lint.isort"]
     if config is not None:
         self.create_file(relpath=".isort.cfg", contents=config)
         args.append("--isort-config=.isort.cfg")
     if passthrough_args:
         args.append(f"--isort-args='{passthrough_args}'")
     if skip:
         args.append(f"--isort-skip")
     options_bootstrapper = create_options_bootstrapper(args=args)
     lint_result = self.request_single_product(
         LintResult,
         Params(IsortFormatter(tuple(targets)), options_bootstrapper))
     input_snapshot = self.request_single_product(
         Snapshot,
         DirectoriesToMerge(
             tuple(target.adaptor.sources.snapshot.directory_digest
                   for target in targets)),
     )
     fmt_result = self.request_single_product(
         FmtResult,
         Params(
             IsortFormatter(tuple(targets),
                            prior_formatter_result=input_snapshot),
             options_bootstrapper,
         ),
     )
     return lint_result, fmt_result
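
A hypothetical use of this helper from a test, to show the intended flow; the fixture names (`make_target_with_origin`, `bad_source`) are illustrative stand-ins, not taken from the original suite:

 def test_isort_sketch(self) -> None:
     # Hypothetical fixture: build a TargetAdaptorWithOrigin over sources
     # whose imports isort would reorder.
     target = self.make_target_with_origin([self.bad_source])
     lint_result, fmt_result = self.run_isort([target])
     self.assertEqual(lint_result.exit_code, 1)  # isort flagged bad sorting
     self.assertEqual(fmt_result.stderr, "")  # reformatting itself succeeded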
Code example #8
async def run_setup_py(
    req: RunSetupPyRequest,
    setuptools_setup: SetuptoolsSetup,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> RunSetupPyResult:
    """Run a setup.py command on a single exported target."""
    merged_input_files = await Get[Digest](DirectoriesToMerge(
        directories=(req.chroot.digest,
                     setuptools_setup.requirements_pex.directory_digest)))
    # The setuptools dist dir, which setuptools creates under the chroot (not to be
    # confused with Pants's own dist dir at the buildroot).
    dist_dir = "dist/"
    process = setuptools_setup.requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./setuptools.pex",
        pex_args=("setup.py", *req.args),
        input_files=merged_input_files,
        # setuptools commands that create dists write them to the distdir.
        # TODO: Could there be other useful files to capture?
        output_directories=(dist_dir, ),
        description=
        f"Run setuptools for {req.exported_target.target.address.reference()}",
    )
    result = await Get[ProcessResult](Process, process)
    output_digest = await Get[Digest](DirectoryWithPrefixToStrip(
        result.output_directory_digest, dist_dir))
    return RunSetupPyResult(output_digest)
Code example #9
async def fmt(console: Console, targets: HydratedTargets, workspace: Workspace,
              union_membership: UnionMembership) -> Fmt:
    aggregated_results = await MultiGet(
        Get[AggregatedFmtResults](FormatTarget, target.adaptor)
        for target in targets
        if FormatTarget.is_formattable(target.adaptor,
                                       union_membership=union_membership))
    individual_results = [
        result for aggregated_result in aggregated_results
        for result in aggregated_result.results
    ]

    if not individual_results:
        return Fmt(exit_code=0)

    # NB: this will fail if there are any conflicting changes, which we want to happen rather than
    # silently having one result override the other. In practice, this should never happen due
    # to our use of an aggregator rule for each distinct language.
    merged_formatted_digest = await Get[Digest](DirectoriesToMerge(
        tuple(aggregated_result.combined_digest
              for aggregated_result in aggregated_results)))
    workspace.materialize_directory(
        DirectoryToMaterialize(merged_formatted_digest))
    for result in individual_results:
        if result.stdout:
            console.print_stdout(result.stdout)
        if result.stderr:
            console.print_stderr(result.stderr)

    # Since the rules to produce FmtResult should use ExecuteRequest, rather than
    # FallibleExecuteProcessRequest, we assume that there were no failures.
    return Fmt(exit_code=0)
Code example #10
async def determine_specified_source_files(request: SpecifiedSourceFilesRequest) -> SourceFiles:
    """Determine the specified `sources` for targets, possibly finding a subset of the original
    `sources` fields if the user supplied file arguments."""
    full_snapshots = {}
    snapshot_subset_requests = {}
    for adaptor_with_origin in request.adaptors_with_origins:
        adaptor = adaptor_with_origin.adaptor
        if not adaptor.has_sources():
            continue
        result = determine_specified_sources_for_target(adaptor_with_origin)
        if isinstance(result, Snapshot):
            full_snapshots[adaptor] = result
        else:
            snapshot_subset_requests[adaptor] = result

    snapshot_subsets: Tuple[Snapshot, ...] = ()
    if snapshot_subset_requests:
        snapshot_subsets = await MultiGet(
            Get[Snapshot](SnapshotSubset, request) for request in snapshot_subset_requests.values()
        )

    all_snapshots: Iterable[Snapshot] = (*full_snapshots.values(), *snapshot_subsets)
    if request.strip_source_roots:
        all_adaptors = (*full_snapshots.keys(), *snapshot_subset_requests.keys())
        stripped_snapshots = await MultiGet(
            Get[SourceRootStrippedSources](
                StripTargetRequest(adaptor, specified_files_snapshot=snapshot)
            )
            for adaptor, snapshot in zip(all_adaptors, all_snapshots)
        )
        all_snapshots = (stripped_snapshot.snapshot for stripped_snapshot in stripped_snapshots)
    result = await Get[Snapshot](
        DirectoriesToMerge(tuple(snapshot.directory_digest for snapshot in all_snapshots))
    )
    return SourceFiles(result)
Code example #11
File: fmt.py (project: OniOni/pants)
async def fmt(console: Console, targets: HydratedTargets, workspace: Workspace,
              union_membership: UnionMembership) -> Fmt:
    results = await MultiGet(
        Get[FmtResult](TargetWithSources, target.adaptor) for target in targets
        if TargetWithSources.is_formattable_and_lintable(
            target.adaptor, union_membership=union_membership))

    if not results:
        return Fmt(exit_code=0)

    # NB: this will fail if there are any conflicting changes, which we want to happen rather than
    # silently having one result override the other.
    # TODO(#8722): how should we handle multiple auto-formatters touching the same files?
    merged_formatted_digest = await Get[Digest](DirectoriesToMerge(
        tuple(result.digest for result in results)))
    workspace.materialize_directory(
        DirectoryToMaterialize(merged_formatted_digest))
    for result in results:
        if result.stdout:
            console.print_stdout(result.stdout)
        if result.stderr:
            console.print_stderr(result.stderr)

    # Since the rules to produce FmtResult should use ExecuteRequest, rather than
    # FallibleExecuteProcessRequest, we assume that there were no failures.
    return Fmt(exit_code=0)
Code example #12
 def run_docformatter(
     self,
     targets: List[TargetAdaptorWithOrigin],
     *,
     passthrough_args: Optional[str] = None,
     skip: bool = False,
 ) -> Tuple[LintResult, FmtResult]:
     args = ["--backend-packages2=pants.backend.python.lint.docformatter"]
     if passthrough_args:
         args.append(f"--docformatter-args='{passthrough_args}'")
     if skip:
         args.append(f"--docformatter-skip")
     options_bootstrapper = create_options_bootstrapper(args=args)
     lint_result = self.request_single_product(
         LintResult,
         Params(DocformatterFormatter(tuple(targets)),
                options_bootstrapper))
     input_snapshot = self.request_single_product(
         Snapshot,
         DirectoriesToMerge(
             tuple(target.adaptor.sources.snapshot.directory_digest
                   for target in targets)),
     )
     fmt_result = self.request_single_product(
         FmtResult,
         Params(
             DocformatterFormatter(tuple(targets),
                                   prior_formatter_result=input_snapshot),
             options_bootstrapper,
         ),
     )
     return lint_result, fmt_result
Code example #13
async def create_awslambda(
    console: Console,
    options: AWSLambdaOptions,
    distdir: DistDir,
    buildroot: BuildRoot,
    workspace: Workspace,
) -> AWSLambdaGoal:
    targets_to_valid_configs = await Get[TargetsToValidConfigurations](
        TargetsToValidConfigurationsRequest(
            AWSLambdaConfiguration,
            goal_description=f"the `{options.name}` goal",
            error_if_no_valid_targets=True,
        ))
    awslambdas = await MultiGet(
        Get[CreatedAWSLambda](AWSLambdaConfiguration, config)
        for config in targets_to_valid_configs.configurations)
    merged_digest = await Get[Digest](DirectoriesToMerge(
        tuple(awslambda.digest for awslambda in awslambdas)))
    result = workspace.materialize_directory(
        DirectoryToMaterialize(merged_digest,
                               path_prefix=str(distdir.relpath)))
    with options.line_oriented(console) as print_stdout:
        for awslambda, path in zip(awslambdas, result.output_paths):
            print_stdout(
                f"Wrote code bundle to {os.path.relpath(path, buildroot.path)}"
            )
            print_stdout(f"  Runtime: {awslambda.runtime}")
            print_stdout(f"  Handler: {awslambda.handler}")
            print_stdout("")
    return AWSLambdaGoal(exit_code=0)
Code example #14
async def get_sources(request: SetupPySourcesRequest,
                      source_root_config: SourceRootConfig) -> SetupPySources:
    targets = request.hydrated_targets
    stripped_srcs_list = await MultiGet(
        Get[SourceRootStrippedSources](HydratedTarget, target)
        for target in targets)

    # Create a chroot with all the sources, and any ancestor __init__.py files that might be needed
    # for imports to work.  Note that if a repo has multiple exported targets under a single ancestor
    # package, then that package must be a namespace package, which in Python 3 means it must not
    # have an __init__.py. We don't validate this here, because it would require inspecting *all*
    # targets, whether or not they are in the target set for this run - basically the entire repo.
    # So it's the repo owners' responsibility to ensure __init__.py hygiene.
    stripped_srcs_digests = [
        stripped_sources.snapshot.directory_digest
        for stripped_sources in stripped_srcs_list
    ]
    ancestor_init_pys = await Get[AncestorInitPyFiles](HydratedTargets,
                                                       targets)
    sources_digest = await Get[Digest](DirectoriesToMerge(directories=tuple(
        [*stripped_srcs_digests, *ancestor_init_pys.digests])))
    init_pys_snapshot = await Get[Snapshot](SnapshotSubset(
        sources_digest, PathGlobs(['**/__init__.py'])))
    init_py_contents = await Get[FilesContent](
        Digest, init_pys_snapshot.directory_digest)

    packages, namespace_packages, package_data = find_packages(
        source_roots=source_root_config.get_source_roots(),
        tgts_and_stripped_srcs=list(zip(targets, stripped_srcs_list)),
        init_py_contents=init_py_contents,
        py2=request.py2)
    return SetupPySources(digest=sources_digest,
                          packages=packages,
                          namespace_packages=namespace_packages,
                          package_data=package_data)
Code example #15
async def create_pex_from_target_closure(request: CreatePexFromTargetClosure,
                                         python_setup: PythonSetup) -> Pex:
    transitive_hydrated_targets = await Get[TransitiveHydratedTargets](
        Addresses, request.addresses)
    all_targets = transitive_hydrated_targets.closure
    all_target_adaptors = [t.adaptor for t in all_targets]

    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=all_target_adaptors, python_setup=python_setup)

    input_digests = []
    if request.additional_input_files:
        input_digests.append(request.additional_input_files)
    if request.include_source_files:
        chrooted_sources = await Get[ChrootedPythonSources](
            HydratedTargets(all_targets))
        input_digests.append(chrooted_sources.snapshot.directory_digest)
    merged_input_digest = await Get[Digest](
        DirectoriesToMerge(directories=tuple(input_digests)))
    requirements = PexRequirements.create_from_adaptors(
        adaptors=all_target_adaptors,
        additional_requirements=request.additional_requirements)

    create_pex_request = CreatePex(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        entry_point=request.entry_point,
        input_files_digest=merged_input_digest,
        additional_args=request.additional_args,
    )

    pex = await Get[Pex](CreatePex, create_pex_request)
    return pex
Code example #16
def get_black_input(
    wrapped_target: FormattablePythonTarget,
    black: Black,
) -> BlackInput:
    config_path = black.get_options().config
    config_snapshot = yield Get(Snapshot, PathGlobs(include=(config_path, )))

    resolved_requirements_pex = yield Get(
        Pex,
        CreatePex(
            output_filename="black.pex",
            requirements=PexRequirements(
                requirements=tuple(black.get_requirement_specs())),
            interpreter_constraints=PexInterpreterConstraints(
                constraint_set=frozenset(
                    black.default_interpreter_constraints)),
            entry_point=black.get_entry_point(),
        ))
    target = wrapped_target.target
    sources_digest = target.sources.snapshot.directory_digest

    all_input_digests = [
        sources_digest,
        resolved_requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    ]
    merged_input_files = yield Get(
        Digest,
        DirectoriesToMerge(directories=tuple(all_input_digests)),
    )
    yield BlackInput(config_path, resolved_requirements_pex,
                     merged_input_files)
Code example #17
async def create_python_awslambda(
    lambda_tgt_adaptor: PythonAWSLambdaAdaptor,
    lambdex_setup: LambdexSetup,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> CreatedAWSLambda:
    # TODO: We must enforce that everything is built for Linux, no matter the local platform.
    pex_filename = f"{lambda_tgt_adaptor.address.target_name}.pex"
    pex_request = LegacyPexFromTargetsRequest(
        addresses=Addresses([lambda_tgt_adaptor.address]),
        entry_point=None,
        output_filename=pex_filename,
    )

    pex = await Get[Pex](LegacyPexFromTargetsRequest, pex_request)
    merged_input_files = await Get[Digest](DirectoriesToMerge(
        directories=(pex.directory_digest,
                     lambdex_setup.requirements_pex.directory_digest)))

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    lambdex_args = ("build", "-e", lambda_tgt_adaptor.handler, pex_filename)
    process_request = lambdex_setup.requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./lambdex.pex",
        pex_args=lambdex_args,
        input_files=merged_input_files,
        output_files=(pex_filename, ),
        description=f"Run Lambdex for {lambda_tgt_adaptor.address.reference()}",
    )
    result = await Get[ExecuteProcessResult](ExecuteProcessRequest,
                                             process_request)
    return CreatedAWSLambda(digest=result.output_directory_digest,
                            name=pex_filename)
Code example #18
async def run_setup_py(
    req: RunSetupPyRequest,
    setuptools_setup: SetuptoolsSetup,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment
) -> RunSetupPyResult:
  """Run a setup.py command on a single exported target."""
  merged_input_files = await Get[Digest](
    DirectoriesToMerge(directories=(
      req.chroot.digest,
      setuptools_setup.requirements_pex.directory_digest))
  )
  # The setuptools dist dir, which setuptools creates under the chroot (not to be
  # confused with Pants's own dist dir at the buildroot).
  # TODO: The user can change this with the --dist-dir flag to the sdist and bdist_wheel commands.
  #  See https://github.com/pantsbuild/pants/issues/8912.
  dist_dir = 'dist/'
  request = setuptools_setup.requirements_pex.create_execute_request(
    python_setup=python_setup,
    subprocess_encoding_environment=subprocess_encoding_environment,
    pex_path="./setuptools.pex",
    pex_args=('setup.py', *req.args),
    input_files=merged_input_files,
    # setuptools commands that create dists write them to the distdir.
    # TODO: Could there be other useful files to capture?
    output_directories=(dist_dir,),
    description=f'Run setuptools for {req.exported_target.hydrated_target.address.reference()}',
  )
  result = await Get[ExecuteProcessResult](ExecuteProcessRequest, request)
  output_digest = await Get[Digest](
    DirectoryWithPrefixToStrip(result.output_directory_digest, dist_dir))
  return RunSetupPyResult(output_digest)
Code example #19
File: pex.py (project: sazlin/pants)
def create_pex(request: CreatePex, pex_bin: DownloadedPexBin,
               python_setup: PythonSetup,
               subprocess_encoding_environment: SubprocessEncodingEnvironment,
               pex_build_environment: PexBuildEnvironment,
               platform: Platform) -> Pex:
    """Returns a PEX with the given requirements, optional entry point, and optional
  interpreter constraints."""

    interpreter_constraint_args = []
    for constraint in request.interpreter_constraints:
        interpreter_constraint_args.extend(
            ["--interpreter-constraint", constraint])

    argv = ["--output-file", request.output_filename]
    if request.entry_point is not None:
        argv.extend(["--entry-point", request.entry_point])
    argv.extend(interpreter_constraint_args + list(request.requirements))

    source_dir_name = 'source_files'

    argv.append(f'--sources-directory={source_dir_name}')
    sources_digest = request.input_files_digest or EMPTY_DIRECTORY_DIGEST
    sources_digest_as_subdir = yield Get(
        Digest, DirectoryWithPrefixToAdd(sources_digest, source_dir_name))
    all_inputs = (
        pex_bin.directory_digest,
        sources_digest_as_subdir,
    )
    merged_digest = yield Get(Digest,
                              DirectoriesToMerge(directories=all_inputs))

    # NB: PEX outputs are platform dependent, so in order to get a PEX that we can use locally
    # without cross-building, we specify that our PEX command be run on the current local platform.
    # When we support cross-building through CLI flags, we can configure requests that build a PEX
    # for our local platform but that are able to execute on a different platform. For now, in
    # order to guarantee a correct build, we need to restrict this command to execute on the same
    # platform type that the output is intended for. The correct way to interpret the keys
    # (execution_platform_constraint, target_platform_constraint) of this dictionary is "the
    # output of this command is intended for `target_platform_constraint` iff it is run on
    # `execution_platform_constraint`".
    execute_process_request = MultiPlatformExecuteProcessRequest(
        {
            (PlatformConstraint(platform.value),
             PlatformConstraint(platform.value)):
            pex_bin.create_execute_request(
                python_setup=python_setup,
                subprocess_encoding_environment=subprocess_encoding_environment,
                pex_build_environment=pex_build_environment,
                pex_args=argv,
                input_files=merged_digest,
                description=
                f"Create a requirements PEX: {', '.join(request.requirements)}",
                output_files=(request.output_filename, ))
        })

    result = yield Get(ExecuteProcessResult,
                       MultiPlatformExecuteProcessRequest,
                       execute_process_request)
    yield Pex(directory_digest=result.output_directory_digest)
Code example #20
File: rules.py (project: rahuliyer95/pants)
async def setup(
    request: SetupRequest,
    isort: Isort,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    adaptors_with_origins = request.formatter.adaptors_with_origins

    requirements_pex = await Get[Pex](PexRequest(
        output_filename="isort.pex",
        requirements=PexRequirements(isort.get_requirement_specs()),
        interpreter_constraints=PexInterpreterConstraints(
            isort.default_interpreter_constraints),
        entry_point=isort.get_entry_point(),
    ))

    config_path: Optional[List[str]] = isort.options.config
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=config_path or (),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        conjunction=GlobExpansionConjunction.all_match,
        description_of_origin="the option `--isort-config`",
    ))

    if request.formatter.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](LegacyAllSourceFilesRequest(
            adaptor_with_origin.adaptor
            for adaptor_with_origin in adaptors_with_origins))
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.formatter.prior_formatter_result

    specified_source_files = await Get[SourceFiles](
        LegacySpecifiedSourceFilesRequest(adaptors_with_origins))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files_snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )), )

    address_references = ", ".join(
        sorted(adaptor_with_origin.adaptor.address.reference()
               for adaptor_with_origin in adaptors_with_origins))

    process_request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./isort.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            isort=isort,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=f"Run isort for {address_references}",
    )
    return Setup(process_request)
Code example #21
async def setup(
    request: SetupRequest,
    black: Black,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    requirements_pex = await Get[Pex](PexRequest(
        output_filename="black.pex",
        requirements=PexRequirements(black.get_requirement_specs()),
        interpreter_constraints=PexInterpreterConstraints(
            black.default_interpreter_constraints),
        entry_point=black.get_entry_point(),
    ))

    config_path: Optional[str] = black.options.config
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=tuple([config_path] if config_path else []),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--black-config`",
    ))

    if request.configs.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](AllSourceFilesRequest(
            config.sources for config in request.configs))
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.configs.prior_formatter_result

    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (config.sources, config.origin) for config in request.configs))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files_snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )), )

    address_references = ", ".join(
        sorted(config.address.reference() for config in request.configs))

    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./black.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            black=black,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=
        f"Run Black on {pluralize(len(request.configs), 'target')}: {address_references}.",
    )
    return Setup(process)
Code example #22
    def test_asynchronously_merge_directories(self):
        with temporary_dir() as temp_dir:
            with open(os.path.join(temp_dir, "roland"), "w") as f:
                f.write("European Burmese")
            with open(os.path.join(temp_dir, "susannah"), "w") as f:
                f.write("Not sure actually")
            (empty_snapshot, roland_snapshot, susannah_snapshot,
             both_snapshot) = (self.scheduler.capture_snapshots((
                 PathGlobsAndRoot(PathGlobs(("doesnotmatch", ), ()),
                                  text_type(temp_dir)),
                 PathGlobsAndRoot(PathGlobs(("roland", ), ()),
                                  text_type(temp_dir)),
                 PathGlobsAndRoot(PathGlobs(("susannah", ), ()),
                                  text_type(temp_dir)),
                 PathGlobsAndRoot(PathGlobs(("*", ), ()), text_type(temp_dir)),
             )))

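            # Case 1: merging only the empty digest is the identity.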
            empty_merged = self.scheduler.product_request(
                Digest,
                [DirectoriesToMerge((empty_snapshot.directory_digest, ))],
            )[0]
            self.assertEqual(empty_snapshot.directory_digest, empty_merged)

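            # Case 2: merging a digest with the empty digest is a no-op.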
            roland_merged = self.scheduler.product_request(
                Digest,
                [
                    DirectoriesToMerge((roland_snapshot.directory_digest,
                                        empty_snapshot.directory_digest))
                ],
            )[0]
            self.assertEqual(
                roland_snapshot.directory_digest,
                roland_merged,
            )

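            # Case 3: merging two disjoint digests equals snapshotting both files together.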
            both_merged = self.scheduler.product_request(
                Digest,
                [
                    DirectoriesToMerge((roland_snapshot.directory_digest,
                                        susannah_snapshot.directory_digest))
                ],
            )[0]

            self.assertEqual(both_snapshot.directory_digest, both_merged)
Code example #23
async def flake8_lint(
    configs: Flake8Configurations,
    flake8: Flake8,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    if flake8.options.skip:
        return LintResult.noop()

    # NB: Flake8 output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://flake8.pycqa.org/en/latest/user/invocation.html.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (config.compatibility for config in configs), python_setup)
    requirements_pex = await Get[Pex](PexRequest(
        output_filename="flake8.pex",
        requirements=PexRequirements(flake8.get_requirement_specs()),
        interpreter_constraints=interpreter_constraints,
        entry_point=flake8.get_entry_point(),
    ))

    config_path: Optional[str] = flake8.options.config
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=tuple([config_path] if config_path else []),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--flake8-config`",
    ))

    all_source_files = await Get[SourceFiles](AllSourceFilesRequest(
        config.sources for config in configs))
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (config.sources, config.origin) for config in configs))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files.snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )), )

    address_references = ", ".join(
        sorted(config.address.reference() for config in configs))

    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./flake8.pex",
        pex_args=generate_args(specified_source_files=specified_source_files,
                               flake8=flake8),
        input_files=merged_input_files,
        description=
        f"Run Flake8 on {pluralize(len(configs), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result)
Code example #24
async def bandit_lint(
    configs: BanditConfigurations,
    bandit: Bandit,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    if bandit.options.skip:
        return LintResult.noop()

    # NB: Bandit output depends upon which Python interpreter version it's run with. See
    # https://github.com/PyCQA/bandit#under-which-version-of-python-should-i-install-bandit.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (config.compatibility for config in configs),
        python_setup=python_setup)
    requirements_pex = await Get[Pex](PexRequest(
        output_filename="bandit.pex",
        requirements=PexRequirements(bandit.get_requirement_specs()),
        interpreter_constraints=interpreter_constraints,
        entry_point=bandit.get_entry_point(),
    ))

    config_path: Optional[str] = bandit.options.config
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=tuple([config_path] if config_path else []),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--bandit-config`",
    ))

    all_source_files = await Get[SourceFiles](AllSourceFilesRequest(
        config.sources for config in configs))
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (config.sources, config.origin) for config in configs))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files.snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )), )

    address_references = ", ".join(
        sorted(config.address.reference() for config in configs))

    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./bandit.pex",
        pex_args=generate_args(specified_source_files=specified_source_files,
                               bandit=bandit),
        input_files=merged_input_files,
        description=
        f"Run Bandit on {pluralize(len(configs), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result)
Code example #25
async def create_pex_from_target_closure(request: CreatePexFromTargetClosure,
                                         python_setup: PythonSetup) -> Pex:
    transitive_hydrated_targets = await Get[TransitiveHydratedTargets](
        BuildFileAddresses, request.build_file_addresses)
    all_targets = transitive_hydrated_targets.closure
    all_target_adaptors = [t.adaptor for t in all_targets]

    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=tuple(all_targets), python_setup=python_setup)

    merged_input_files: Optional[Digest] = None
    if request.include_source_files:
        source_root_stripped_sources = await MultiGet(
            Get[SourceRootStrippedSources](HydratedTarget, target_adaptor)
            for target_adaptor in all_targets)

        stripped_sources_digests = [
            stripped_sources.snapshot.directory_digest
            for stripped_sources in source_root_stripped_sources
        ]
        sources_digest = await Get[Digest](
            DirectoriesToMerge(directories=tuple(stripped_sources_digests)))
        inits_digest = await Get[InjectedInitDigest](Digest, sources_digest)
        all_input_digests = [sources_digest, inits_digest.directory_digest]
        merged_input_files = await Get[Digest](
            DirectoriesToMerge,
            DirectoriesToMerge(directories=tuple(all_input_digests)))

    requirements = PexRequirements.create_from_adaptors(
        adaptors=all_target_adaptors,
        additional_requirements=request.additional_requirements)

    create_pex_request = CreatePex(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        entry_point=request.entry_point,
        input_files_digest=merged_input_files,
    )

    pex = await Get[Pex](CreatePex, create_pex_request)
    return pex
Code example #26
async def merge_coverage_data(
    data_batch: PytestCoverageDataBatch,
    transitive_targets: TransitiveHydratedTargets,
    python_setup: PythonSetup,
    coverage_setup: CoverageSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> MergedCoverageData:
    """Takes all python test results and merges their coverage data into a single sql file."""
    # We start with a bunch of test results, each of which has a coverage data file called `.coverage`
    # We prefix each of these with their address so that we can write them all into a single pex.
    coverage_directory_digests = await MultiGet(
        Get[Digest](
            DirectoryWithPrefixToAdd(
                directory_digest=result.test_result.coverage_data.digest,  # type: ignore[attr-defined]
                prefix=result.address.path_safe_spec,
            )) for result in data_batch.addresses_and_test_results
        if result.test_result is not None
        and result.test_result.coverage_data is not None)
    sources = await Get[SourceFiles](AllSourceFilesRequest(
        (ht.adaptor for ht in transitive_targets.closure),
        strip_source_roots=False))
    sources_with_inits_snapshot = await Get[InitInjectedSnapshot](
        InjectInitRequest(sources.snapshot))
    coveragerc = await Get[Coveragerc](
        CoveragercRequest(HydratedTargets(transitive_targets.closure),
                          test_time=True))
    merged_input_files: Digest = await Get(
        Digest,
        DirectoriesToMerge(directories=(
            *coverage_directory_digests,
            sources_with_inits_snapshot.snapshot.directory_digest,
            coveragerc.digest,
            coverage_setup.requirements_pex.directory_digest,
        )),
    )

    prefixes = [
        f"{result.address.path_safe_spec}/.coverage"
        for result in data_batch.addresses_and_test_results
    ]
    coverage_args = ["combine", *prefixes]
    request = coverage_setup.requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{coverage_setup.requirements_pex.output_filename}",
        pex_args=coverage_args,
        input_files=merged_input_files,
        output_files=(".coverage", ),
        description=f"Merge coverage reports.",
    )

    result = await Get[ExecuteProcessResult](ExecuteProcessRequest, request)
    return MergedCoverageData(coverage_data=result.output_directory_digest)
Code example #27
async def generate_coverage_report(
    transitive_targets: TransitiveHydratedTargets,
    python_setup: PythonSetup,
    coverage_setup: CoverageSetup,
    merged_coverage_data: MergedCoverageData,
    coverage_toolbase: PytestCoverage,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> CoverageReport:
    """Takes all python test results and generates a single coverage report."""
    requirements_pex = coverage_setup.requirements_pex
    # TODO(#4535) We need a better way to do this kind of check that covers synthetic targets and rules extensibility.
    python_targets = [
        target for target in transitive_targets.closure
        if target.adaptor.type_alias in ("python_library", "python_tests")
    ]

    coveragerc = await Get[Coveragerc](CoveragercRequest(
        HydratedTargets(python_targets)))
    sources = await Get[SourceFiles](AllSourceFilesRequest(
        (ht.adaptor for ht in transitive_targets.closure),
        strip_source_roots=False))
    sources_with_inits_snapshot = await Get[InitInjectedSnapshot](
        InjectInitRequest(sources.snapshot))
    merged_input_files: Digest = await Get(
        Digest,
        DirectoriesToMerge(directories=(
            merged_coverage_data.coverage_data,
            coveragerc.digest,
            requirements_pex.directory_digest,
            sources_with_inits_snapshot.snapshot.directory_digest,
        )),
    )
    report_type = coverage_toolbase.options.report
    coverage_args = [report_type.report_name]
    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{coverage_setup.requirements_pex.output_filename}",
        pex_args=coverage_args,
        input_files=merged_input_files,
        output_directories=("htmlcov", ),
        output_files=("coverage.xml", ),
        description=f"Generate coverage report.",
    )

    result = await Get[ExecuteProcessResult](ExecuteProcessRequest, request)
    if report_type == ReportType.CONSOLE:
        return ConsoleCoverageReport(result.stdout.decode())

    return FilesystemCoverageReport(
        result.output_directory_digest,
        coverage_toolbase.options.report_output_path)
Code example #28
async def generate_chroot(request: SetupPyChrootRequest) -> SetupPyChroot:
  if request.py2:
    # TODO: Implement Python 2 support.  This will involve, among other things: merging ancestor
    # __init__.py files into the chroot, detecting packages based on the presence of __init__.py,
    # and inspecting all __init__.py files for the namespace package incantation.
    raise UnsupportedPythonVersion('Running setup.py commands not supported for Python 2.')

  owned_deps = await Get[OwnedDependencies](DependencyOwner(request.exported_target))
  targets = HydratedTargets(od.hydrated_target for od in owned_deps)
  sources = await Get[SetupPySources](SetupPySourcesRequest(targets))
  requirements = await Get[ExportedTargetRequirements](DependencyOwner(request.exported_target))

  # Nest the sources under the src/ prefix.
  src_digest = await Get[Digest](DirectoryWithPrefixToAdd(sources.digest, CHROOT_SOURCE_ROOT))

  # Generate the kwargs to the setup() call.
  setup_kwargs = request.exported_target.hydrated_target.adaptor.provides.setup_py_keywords.copy()
  setup_kwargs.update({
    'package_dir': {'': CHROOT_SOURCE_ROOT},
    'packages': sources.packages,
    'namespace_packages': sources.namespace_packages,
    'package_data': dict(sources.package_data),
    'install_requires': requirements.requirement_strs
  })
  ht = request.exported_target.hydrated_target
  key_to_binary_spec = getattr(ht.adaptor.provides, 'binaries', {})
  keys = list(key_to_binary_spec.keys())
  binaries = await MultiGet(Get[HydratedTarget](
    Address, Address.parse(key_to_binary_spec[key], relative_to=ht.address.spec_path))
    for key in keys)
  for key, binary in zip(keys, binaries):
    if (not isinstance(binary.adaptor, PythonBinaryAdaptor) or
        getattr(binary.adaptor, 'entry_point', None) is None):
      raise InvalidEntryPoint(
        f'The binary {key} exported by {ht.address.reference()} is not a valid entry point.')
    entry_points = setup_kwargs['entry_points'] = setup_kwargs.get('entry_points', {})
    console_scripts = entry_points['console_scripts'] = entry_points.get('console_scripts', [])
    console_scripts.append(f'{key}={binary.adaptor.entry_point}')

  # Generate the setup script.
  setup_py_content = SETUP_BOILERPLATE.format(
    target_address_spec=ht.address.reference(),
    setup_kwargs_str=distutils_repr(setup_kwargs)
  ).encode()
  extra_files_digest = await Get[Digest](
    InputFilesContent([
      FileContent('setup.py', setup_py_content),
      FileContent('MANIFEST.in', 'include *.py'.encode())  # Make sure setup.py is included.
    ]))

  chroot_digest = await Get[Digest](DirectoriesToMerge((src_digest, extra_files_digest)))
  return SetupPyChroot(chroot_digest, json.dumps(setup_kwargs, sort_keys=True))
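
For a hypothetical exported target, the chroot's setup_kwargs would take roughly this shape. Every value below is an illustrative assumption (including 'src' standing in for CHROOT_SOURCE_ROOT); none of it comes from a real repo:

  setup_kwargs = {
    'name': 'example-dist',
    'version': '0.0.1',
    'package_dir': {'': 'src'},
    'packages': ('example', 'example.subpkg'),
    'namespace_packages': (),
    'package_data': {},
    'install_requires': ('requests==2.22.0',),
    'entry_points': {'console_scripts': ['example-bin=example.app:main']},
  }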
Code example #29
async def prepare_chrooted_python_sources(
        hydrated_targets: HydratedTargets) -> ChrootedPythonSources:
    """Prepares Python sources by stripping the source root and injecting missing init.py files.

  NB: This is useful for Pytest or ./pants run, but not every Python rule will need this.
  For example, autoformatters like Black do not need to understand relative imports or
  execute the code, so they can safely operate on the original source files without
  stripping source roots.
  """

    source_root_stripped_sources = await MultiGet(
        Get[SourceRootStrippedSources](HydratedTarget, hydrated_target)
        for hydrated_target in hydrated_targets)

    sources_digest = await Get[Digest](DirectoriesToMerge(directories=tuple(
        stripped_sources.snapshot.directory_digest
        for stripped_sources in source_root_stripped_sources)))
    inits_digest = await Get[InjectedInitDigest](Digest, sources_digest)
    sources_digest = await Get[Digest](
        DirectoriesToMerge(directories=(sources_digest,
                                        inits_digest.directory_digest)))
    return ChrootedPythonSources(digest=sources_digest)
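
A minimal hypothetical consumer of the rule above, to show where the chroot digest flows next; the rule name is invented, and the request shape mirrors how example #15 asks for ChrootedPythonSources:

async def run_in_chroot_sketch(hydrated_targets: HydratedTargets) -> Digest:
    # Ask the engine for the chrooted sources prepared by the rule above. The
    # resulting digest is what a Pytest or `./pants run` process would receive
    # as its input files.
    chrooted = await Get[ChrootedPythonSources](HydratedTargets, hydrated_targets)
    return chrooted.digest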
Code example #30
async def pex_from_targets(request: PexFromTargetsRequest,
                           python_setup: PythonSetup) -> PexRequest:
    transitive_targets = await Get[TransitiveTargets](Addresses,
                                                      request.addresses)
    all_targets = transitive_targets.closure

    python_targets = []
    resource_targets = []
    python_requirement_fields = []
    for tgt in all_targets:
        if tgt.has_field(PythonSources):
            python_targets.append(tgt)
        if tgt.has_field(PythonRequirementsField):
            python_requirement_fields.append(tgt[PythonRequirementsField])
        # NB: PythonRequirementsFileSources is a subclass of FilesSources. We filter it out so that
        # requirements.txt is not included in the PEX and so that irrelevant changes to it (e.g.
        # whitespace changes) do not invalidate the PEX.
        if tgt.has_field(ResourcesSources) or (
                tgt.has_field(FilesSources)
                and not tgt.has_field(PythonRequirementsFileSources)):
            resource_targets.append(tgt)

    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (tgt.get(PythonInterpreterCompatibility) for tgt in python_targets),
        python_setup)

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get[ImportablePythonSources](
            Targets(python_targets + resource_targets))
        input_digests.append(prepared_sources.snapshot.directory_digest)
    merged_input_digest = await Get[Digest](
        DirectoriesToMerge(directories=tuple(input_digests)))

    requirements = PexRequirements.create_from_requirement_fields(
        python_requirement_fields,
        additional_requirements=request.additional_requirements)

    return PexRequest(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=request.description,
    )