Example #1
File: pex.py  Project: sazlin/pants
def create_pex(request: CreatePex, pex_bin: DownloadedPexBin,
               python_setup: PythonSetup,
               subprocess_encoding_environment: SubprocessEncodingEnvironment,
               pex_build_environment: PexBuildEnvironment,
               platform: Platform) -> Pex:
    """Returns a PEX with the given requirements, optional entry point, and optional
  interpreter constraints."""

    interpreter_constraint_args = []
    for constraint in request.interpreter_constraints:
        interpreter_constraint_args.extend(
            ["--interpreter-constraint", constraint])

    argv = ["--output-file", request.output_filename]
    if request.entry_point is not None:
        argv.extend(["--entry-point", request.entry_point])
    argv.extend(interpreter_constraint_args + list(request.requirements))

    source_dir_name = 'source_files'

    argv.append(f'--sources-directory={source_dir_name}')
    sources_digest = request.input_files_digest if request.input_files_digest else EMPTY_DIRECTORY_DIGEST
    sources_digest_as_subdir = yield Get(
        Digest, DirectoryWithPrefixToAdd(sources_digest, source_dir_name))
    all_inputs = (
        pex_bin.directory_digest,
        sources_digest_as_subdir,
    )
    merged_digest = yield Get(Digest,
                              DirectoriesToMerge(directories=all_inputs))

    # NB: PEX outputs are platform dependent so in order to get a PEX that we can use locally, without
    # cross-building, we specify that our PEX command be run on the current local platform. When we
    # support cross-building through CLI flags we can configure requests that build a PEX for our
    # local platform that are able to execute on a different platform, but for now in order to
    # guarantee correct build we need to restrict this command to execute on the same platform type
    # that the output is intended for. The correct way to interpret the keys
    # (execution_platform_constraint, target_platform_constraint) of this dictionary is "The output of
    # this command is intended for `target_platform_constraint` iff it is run on `execution_platform
    # constraint`".
    execute_process_request = MultiPlatformExecuteProcessRequest(
        {
            (PlatformConstraint(platform.value),
             PlatformConstraint(platform.value)):
            pex_bin.create_execute_request(
                python_setup=python_setup,
                subprocess_encoding_environment=subprocess_encoding_environment,
                pex_build_environment=pex_build_environment,
                pex_args=argv,
                input_files=merged_digest,
                description=
                f"Create a requirements PEX: {', '.join(request.requirements)}",
                output_files=(request.output_filename, ))
        })

    result = yield Get(ExecuteProcessResult,
                       MultiPlatformExecuteProcessRequest,
                       execute_process_request)
    yield Pex(directory_digest=result.output_directory_digest)
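
All of these examples share the same prefix-then-merge shape: re-root one digest under a subdirectory with DirectoryWithPrefixToAdd, then union it with the other inputs via DirectoriesToMerge. Here is a minimal, dependency-free sketch of those semantics, modeling digests as plain path-to-content dicts rather than the engine's content-addressed types:

# Sketch only: dicts stand in for Digest; this is not the Pants engine API.

def add_prefix(digest: dict, prefix: str) -> dict:
    """Model of DirectoryWithPrefixToAdd: re-root every path under `prefix`."""
    return {f"{prefix}/{path}": content for path, content in digest.items()}

def merge(*digests: dict) -> dict:
    """Model of DirectoriesToMerge: union of all inputs; collisions are errors."""
    merged: dict = {}
    for digest in digests:
        for path, content in digest.items():
            if path in merged and merged[path] != content:
                raise ValueError(f"conflicting file: {path}")
            merged[path] = content
    return merged

pex_bin_digest = {"pex": b"<pex binary>"}
sources_digest = {"app/main.py": b"print('hi')"}
inputs = merge(pex_bin_digest, add_prefix(sources_digest, "source_files"))
assert sorted(inputs) == ["pex", "source_files/app/main.py"]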
Example #2
async def merge_coverage_data(
    data_batch: PytestCoverageDataBatch,
    transitive_targets: TransitiveHydratedTargets,
    python_setup: PythonSetup,
    coverage_setup: CoverageSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> MergedCoverageData:
    """Takes all python test results and merges their coverage data into a single sql file."""
    # We start with a bunch of test results, each of which has a coverage data file called `.coverage`.
    # We prefix each of these with their address so that we can write them all into a single PEX.
    coverage_directory_digests = await MultiGet(
        Get[Digest](
            DirectoryWithPrefixToAdd(
                directory_digest=result.test_result.coverage_data.digest,  # type: ignore[attr-defined]
                prefix=result.address.path_safe_spec,
            )) for result in data_batch.addresses_and_test_results
        if result.test_result is not None
        and result.test_result.coverage_data is not None)
    sources = await Get[SourceFiles](AllSourceFilesRequest(
        (ht.adaptor for ht in transitive_targets.closure),
        strip_source_roots=False))
    sources_with_inits_snapshot = await Get[InitInjectedSnapshot](
        InjectInitRequest(sources.snapshot))
    coveragerc = await Get[Coveragerc](CoveragercRequest(HydratedTargets(
        transitive_targets.closure),
                                                         test_time=True))
    merged_input_files: Digest = await Get(
        Digest,
        DirectoriesToMerge(directories=(
            *coverage_directory_digests,
            sources_with_inits_snapshot.snapshot.directory_digest,
            coveragerc.digest,
            coverage_setup.requirements_pex.directory_digest,
        )),
    )

    prefixes = [
        f"{result.address.path_safe_spec}/.coverage"
        for result in data_batch.addresses_and_test_results
    ]
    coverage_args = ["combine", *prefixes]
    request = coverage_setup.requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{coverage_setup.requirements_pex.output_filename}",
        pex_args=coverage_args,
        input_files=merged_input_files,
        output_files=(".coverage", ),
        description=f"Merge coverage reports.",
    )

    result = await Get[ExecuteProcessResult](ExecuteProcessRequest, request)
    return MergedCoverageData(coverage_data=result.output_directory_digest)
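
The prefixing above is what makes the `combine` arguments line up: each test result's `.coverage` file is re-rooted under its address's path-safe spec, so the merged digest holds exactly one uniquely named data file per test target. A sketch with hypothetical specs:

# Sketch only; the spec strings below are hypothetical examples.
path_safe_specs = ["src.python.foo.foo_test", "src.python.bar.bar_test"]
prefixes = [f"{spec}/.coverage" for spec in path_safe_specs]
coverage_args = ["combine", *prefixes]
assert coverage_args == [
    "combine",
    "src.python.foo.foo_test/.coverage",
    "src.python.bar.bar_test/.coverage",
]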
Example #3
async def generate_chroot(request: SetupPyChrootRequest) -> SetupPyChroot:
  if request.py2:
    # TODO: Implement Python 2 support.  This will involve, among other things: merging ancestor
    # __init__.py files into the chroot, detecting packages based on the presence of __init__.py,
    # and inspecting all __init__.py files for the namespace package incantation.
    raise UnsupportedPythonVersion('Running setup.py commands not supported for Python 2.')

  owned_deps = await Get[OwnedDependencies](DependencyOwner(request.exported_target))
  targets = HydratedTargets(od.hydrated_target for od in owned_deps)
  sources = await Get[SetupPySources](SetupPySourcesRequest(targets))
  requirements = await Get[ExportedTargetRequirements](DependencyOwner(request.exported_target))

  # Nest the sources under the src/ prefix.
  src_digest = await Get[Digest](DirectoryWithPrefixToAdd(sources.digest, CHROOT_SOURCE_ROOT))

  # Generate the kwargs to the setup() call.
  setup_kwargs = request.exported_target.hydrated_target.adaptor.provides.setup_py_keywords.copy()
  setup_kwargs.update({
    'package_dir': {'': CHROOT_SOURCE_ROOT},
    'packages': sources.packages,
    'namespace_packages': sources.namespace_packages,
    'package_data': dict(sources.package_data),
    'install_requires': requirements.requirement_strs
  })
  ht = request.exported_target.hydrated_target
  key_to_binary_spec = getattr(ht.adaptor.provides, 'binaries', {})
  keys = list(key_to_binary_spec.keys())
  binaries = await MultiGet(Get[HydratedTarget](
    Address, Address.parse(key_to_binary_spec[key], relative_to=ht.address.spec_path))
    for key in keys)
  for key, binary in zip(keys, binaries):
    if (not isinstance(binary.adaptor, PythonBinaryAdaptor) or
        getattr(binary.adaptor, 'entry_point', None) is None):
      raise InvalidEntryPoint(
        f'The binary {key} exported by {ht.address.reference()} is not a valid entry point.')
    entry_points = setup_kwargs['entry_points'] = setup_kwargs.get('entry_points', {})
    console_scripts = entry_points['console_scripts'] = entry_points.get('console_scripts', [])
    console_scripts.append(f'{key}={binary.adaptor.entry_point}')

  # Generate the setup script.
  setup_py_content = SETUP_BOILERPLATE.format(
    target_address_spec=ht.address.reference(),
    setup_kwargs_str=distutils_repr(setup_kwargs)
  ).encode()
  extra_files_digest = await Get[Digest](
    InputFilesContent([
      FileContent('setup.py', setup_py_content),
      FileContent('MANIFEST.in', 'include *.py'.encode())  # Make sure setup.py is included.
    ]))

  chroot_digest = await Get[Digest](DirectoriesToMerge((src_digest, extra_files_digest)))
  return SetupPyChroot(chroot_digest, json.dumps(setup_kwargs, sort_keys=True))
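
The entry-point loop builds up the nested `entry_points` / `console_scripts` kwargs with a get-then-assign idiom; `dict.setdefault` is the equivalent one-call form. A sketch with hypothetical binary names and entry points:

# Sketch only: one "name=module:func" console script per exported binary.
setup_kwargs = {"name": "example-dist"}
for key, entry_point in [("foo-bin", "foo.main:run"), ("bar-bin", "bar.main:run")]:
    entry_points = setup_kwargs.setdefault("entry_points", {})
    console_scripts = entry_points.setdefault("console_scripts", [])
    console_scripts.append(f"{key}={entry_point}")
assert setup_kwargs["entry_points"]["console_scripts"] == [
    "foo-bin=foo.main:run",
    "bar-bin=bar.main:run",
]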
Example #4
async def merge_coverage_data(
    data_collection: PytestCoverageDataCollection,
    coverage_setup: CoverageSetup,
    transitive_targets: TransitiveTargets,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> MergedCoverageData:
    """Takes all Python test results and merges their coverage data into a single SQL file."""
    # We start with a bunch of test results, each of which has a coverage data file called
    # `.coverage`. We prefix each of these with their address so that we can write them all into a
    # single PEX.
    coverage_directory_digests = await MultiGet(
        Get[Digest](DirectoryWithPrefixToAdd(data.digest, prefix=data.address.path_safe_spec))
        for data in data_collection
    )
    sources = await Get[SourceFiles](
        AllSourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure), strip_source_roots=False
        )
    )
    sources_with_inits = await Get[InitInjectedSnapshot](InjectInitRequest(sources.snapshot))
    coverage_config = await Get[CoverageConfig](
        CoverageConfigRequest(Targets(transitive_targets.closure), is_test_time=True)
    )
    merged_input_files: Digest = await Get(
        Digest,
        DirectoriesToMerge(
            directories=(
                *coverage_directory_digests,
                sources_with_inits.snapshot.directory_digest,
                coverage_config.digest,
                coverage_setup.requirements_pex.directory_digest,
            )
        ),
    )

    prefixes = [f"{data.address.path_safe_spec}/.coverage" for data in data_collection]
    coverage_args = ("combine", *prefixes)
    process = coverage_setup.requirements_pex.create_process(
        pex_path=f"./{coverage_setup.requirements_pex.output_filename}",
        pex_args=coverage_args,
        input_files=merged_input_files,
        output_files=(".coverage",),
        description=f"Merge {len(prefixes)} Pytest coverage reports.",
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
    )

    result = await Get[ProcessResult](Process, process)
    return MergedCoverageData(coverage_data=result.output_directory_digest)
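
Conceptually, the sandboxed `coverage combine` run is coverage.py's combine API applied to the prefixed data files. A sketch using the coverage library directly (assuming coverage 5.x, whose data file is the SQLite store the docstring refers to); the input paths are hypothetical:

import coverage

cov = coverage.Coverage(data_file=".coverage")
# Each entry is a data file (or a directory to scan); hypothetical prefixed paths.
cov.combine(data_paths=["src.python.foo.foo_test/.coverage",
                        "src.python.bar.bar_test/.coverage"])
cov.save()  # writes the single combined .coverage file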
Example #5
File: test_fs.py  Project: tpasternak/pants
  def test_add_prefix(self):
    input_files_content = InputFilesContent((
      FileContent(path='main.py', content=b'print("from main")', is_executable=False),
      FileContent(path='subdir/sub.py', content=b'print("from sub")', is_executable=False),
    ))

    digest, = self.scheduler.product_request(Digest, [input_files_content])

    dpa = DirectoryWithPrefixToAdd(digest, "outer_dir")
    output_digest, = self.scheduler.product_request(Digest, [dpa])
    snapshot, = self.scheduler.product_request(Snapshot, [output_digest])

    self.assertEqual(sorted(snapshot.files), ['outer_dir/main.py', 'outer_dir/subdir/sub.py'])
    self.assertEqual(sorted(snapshot.dirs), ['outer_dir', 'outer_dir/subdir'])
Example #6
File: test_fs.py  Project: OniOni/pants
    def test_add_prefix(self):
        input_files_content = InputFilesContent((
            FileContent(path='main.py', content=b'print("from main")'),
            FileContent(path='subdir/sub.py', content=b'print("from sub")'),
        ))

        digest = self.request_single_product(Digest, input_files_content)

        dpa = DirectoryWithPrefixToAdd(digest, "outer_dir")
        output_digest = self.request_single_product(Digest, dpa)
        snapshot = self.request_single_product(Snapshot, output_digest)

        self.assertEqual(sorted(snapshot.files),
                         ['outer_dir/main.py', 'outer_dir/subdir/sub.py'])
        self.assertEqual(sorted(snapshot.dirs),
                         ['outer_dir', 'outer_dir/subdir'])
Example #7
async def generate_chroot(request: SetupPyChrootRequest) -> SetupPyChroot:
    exported_target = request.exported_target

    owned_deps = await Get[OwnedDependencies](DependencyOwner(exported_target))
    targets = Targets(od.target for od in owned_deps)
    sources = await Get[SetupPySources](SetupPySourcesRequest(targets,
                                                              py2=request.py2))
    requirements = await Get[ExportedTargetRequirements](
        DependencyOwner(exported_target))

    # Nest the sources under the src/ prefix.
    src_digest = await Get[Digest](DirectoryWithPrefixToAdd(
        sources.digest, CHROOT_SOURCE_ROOT))

    target = exported_target.target
    provides = exported_target.provides

    # Generate the kwargs to the setup() call.
    setup_kwargs = provides.setup_py_keywords.copy()
    setup_kwargs.update({
        "package_dir": {
            "": CHROOT_SOURCE_ROOT
        },
        "packages": sources.packages,
        "namespace_packages": sources.namespace_packages,
        "package_data": dict(sources.package_data),
        "install_requires": tuple(requirements),
    })
    key_to_binary_spec = provides.binaries
    keys = list(key_to_binary_spec.keys())
    binaries = await Get[Targets](Addresses(
        Address.parse(key_to_binary_spec[key],
                      relative_to=target.address.spec_path) for key in keys))
    for key, binary in zip(keys, binaries):
        binary_entry_point = binary.get(PythonEntryPoint).value
        if not binary_entry_point:
            raise InvalidEntryPoint(
                f"The binary {key} exported by {target.address.reference()} is not a valid entry "
                f"point.")
        entry_points = setup_kwargs["entry_points"] = setup_kwargs.get(
            "entry_points", {})
        console_scripts = entry_points["console_scripts"] = entry_points.get(
            "console_scripts", [])
        console_scripts.append(f"{key}={binary_entry_point}")

    # Generate the setup script.
    setup_py_content = SETUP_BOILERPLATE.format(
        target_address_spec=target.address.reference(),
        setup_kwargs_str=distutils_repr(setup_kwargs),
    ).encode()
    extra_files_digest = await Get[Digest](
        InputFilesContent([
            FileContent("setup.py", setup_py_content),
            FileContent(
                "MANIFEST.in",
                "include *.py".encode()),  # Make sure setup.py is included.
        ]))

    chroot_digest = await Get[Digest](DirectoriesToMerge(
        (src_digest, extra_files_digest)))
    return SetupPyChroot(chroot_digest, json.dumps(setup_kwargs,
                                                   sort_keys=True))
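
The final step renders `setup_kwargs` into Python source and formats it into a setup.py template. `SETUP_BOILERPLATE` and `distutils_repr` are not shown in these examples, so the sketch below uses a hypothetical stand-in template and plain `repr`:

# Sketch only: TEMPLATE is a hypothetical stand-in for SETUP_BOILERPLATE,
# and repr() stands in for distutils_repr, which renders the kwargs as source.
TEMPLATE = """\
# Generated for {target_address_spec}.
from setuptools import setup

setup(**{setup_kwargs_str})
"""

setup_kwargs = {"name": "example-dist", "version": "0.0.1"}
setup_py_content = TEMPLATE.format(
    target_address_spec="src/python/example:dist",  # hypothetical address
    setup_kwargs_str=repr(setup_kwargs),
).encode()
print(setup_py_content.decode())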
Example #8
File: pex.py  Project: wisechengyi/pants
async def create_pex(
    request: CreatePex,
    pex_bin: DownloadedPexBin,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
    pex_build_environment: PexBuildEnvironment,
    platform: Platform,
) -> Pex:
    """Returns a PEX with the given requirements, optional entry point, optional interpreter
    constraints, and optional requirement constraints."""

    argv = [
        "--output-file",
        request.output_filename,
        *request.interpreter_constraints.generate_pex_arg_list(),
        *request.additional_args,
    ]

    if python_setup.resolver_jobs:
        argv.extend(["--jobs", python_setup.resolver_jobs])

    if python_setup.manylinux:
        argv.extend(["--manylinux", python_setup.manylinux])
    else:
        argv.append("--no-manylinux")

    if request.entry_point is not None:
        argv.extend(["--entry-point", request.entry_point])

    if python_setup.requirement_constraints is not None:
        argv.extend(["--constraints", python_setup.requirement_constraints])

    source_dir_name = "source_files"
    argv.append(f"--sources-directory={source_dir_name}")

    argv.extend(request.requirements.requirements)

    constraint_file_snapshot = EMPTY_SNAPSHOT
    if python_setup.requirement_constraints is not None:
        constraint_file_snapshot = await Get[Snapshot](PathGlobs(
            [python_setup.requirement_constraints],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            conjunction=GlobExpansionConjunction.all_match,
            description_of_origin=
            "the option `--python-setup-requirement-constraints`",
        ))

    sources_digest = (request.input_files_digest if request.input_files_digest
                      else EMPTY_DIRECTORY_DIGEST)
    sources_digest_as_subdir = await Get[Digest](DirectoryWithPrefixToAdd(
        sources_digest, source_dir_name))

    merged_digest = await Get[Digest](DirectoriesToMerge(directories=(
        pex_bin.directory_digest,
        sources_digest_as_subdir,
        constraint_file_snapshot.directory_digest,
    )))

    # NB: PEX outputs are platform dependent so in order to get a PEX that we can use locally, without
    # cross-building, we specify that our PEX command be run on the current local platform. When we
    # support cross-building through CLI flags we can configure requests that build a PEX for our
    # local platform that are able to execute on a different platform, but for now in order to
    # guarantee correct build we need to restrict this command to execute on the same platform type
    # that the output is intended for. The correct way to interpret the keys
    # (execution_platform_constraint, target_platform_constraint) of this dictionary is "The output of
    # this command is intended for `target_platform_constraint` iff it is run on `execution_platform
    # constraint`".
    execute_process_request = MultiPlatformExecuteProcessRequest({
        (
            PlatformConstraint(platform.value),
            PlatformConstraint(platform.value),
        ):
        pex_bin.create_execute_request(
            python_setup=python_setup,
            subprocess_encoding_environment=subprocess_encoding_environment,
            pex_build_environment=pex_build_environment,
            pex_args=argv,
            input_files=merged_digest,
            description=
            f"Create a requirements PEX: {', '.join(request.requirements.requirements)}",
            output_files=(request.output_filename, ),
        )
    })

    result = await Get[ExecuteProcessResult](
        MultiPlatformExecuteProcessRequest, execute_process_request)
    return Pex(directory_digest=result.output_directory_digest,
               output_filename=request.output_filename)
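
The argv construction follows one pattern throughout: optional settings append a flag/value pair, and manylinux is an explicit on/off switch. A condensed, runnable sketch of that pattern (the flag names come from the rule above; the values and the helper are hypothetical):

# Sketch only; simplified relative to the full rule.
def build_pex_argv(output_filename, entry_point=None, manylinux=None, constraints=None):
    argv = ["--output-file", output_filename]
    argv.extend(["--manylinux", manylinux] if manylinux else ["--no-manylinux"])
    if entry_point is not None:
        argv.extend(["--entry-point", entry_point])
    if constraints is not None:
        argv.extend(["--constraints", constraints])
    return argv

assert build_pex_argv("app.pex", entry_point="app.main") == [
    "--output-file", "app.pex", "--no-manylinux", "--entry-point", "app.main",
]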
Example #9
async def create_pex(
    request: PexRequest,
    pex_bin: DownloadedPexBin,
    python_setup: PythonSetup,
    python_repos: PythonRepos,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
    pex_build_environment: PexBuildEnvironment,
    platform: Platform,
    log_level: LogLevel,
) -> Pex:
    """Returns a PEX with the given requirements, optional entry point, optional interpreter
    constraints, and optional requirement constraints."""

    argv = [
        "--output-file",
        request.output_filename,
        *request.interpreter_constraints.generate_pex_arg_list(),
        # NB: In setting `--no-pypi`, we rely on the default value of `--python-repos-indexes`
        # including PyPI, which will override `--no-pypi` and result in using PyPI in the default
        # case. Why set `--no-pypi`, then? We need to do this so that
        # `--python-repos-repos=['custom_url']` will only point to that index and not include PyPI.
        "--no-pypi",
        *(f"--index={index}" for index in python_repos.indexes),
        *(f"--repo={repo}" for repo in python_repos.repos),
        *request.additional_args,
    ]

    pex_debug = PexDebug(log_level)
    argv.extend(pex_debug.iter_pex_args())

    if python_setup.resolver_jobs:
        argv.extend(["--jobs", python_setup.resolver_jobs])

    if python_setup.manylinux:
        argv.extend(["--manylinux", python_setup.manylinux])
    else:
        argv.append("--no-manylinux")

    if request.entry_point is not None:
        argv.extend(["--entry-point", request.entry_point])

    if python_setup.requirement_constraints is not None:
        argv.extend(["--constraints", python_setup.requirement_constraints])

    source_dir_name = "source_files"
    argv.append(f"--sources-directory={source_dir_name}")

    argv.extend(request.requirements.requirements)

    constraint_file_snapshot = EMPTY_SNAPSHOT
    if python_setup.requirement_constraints is not None:
        constraint_file_snapshot = await Get[Snapshot](PathGlobs(
            [python_setup.requirement_constraints],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            conjunction=GlobExpansionConjunction.all_match,
            description_of_origin=
            "the option `--python-setup-requirement-constraints`",
        ))

    sources_digest_as_subdir = await Get[Digest](DirectoryWithPrefixToAdd(
        request.sources or EMPTY_DIRECTORY_DIGEST, source_dir_name))
    additional_inputs_digest = request.additional_inputs or EMPTY_DIRECTORY_DIGEST

    merged_digest = await Get[Digest](DirectoriesToMerge(directories=(
        pex_bin.directory_digest,
        sources_digest_as_subdir,
        additional_inputs_digest,
        constraint_file_snapshot.directory_digest,
    )))

    # NB: PEX outputs are platform dependent so in order to get a PEX that we can use locally, without
    # cross-building, we specify that our PEX command be run on the current local platform. When we
    # support cross-building through CLI flags we can configure requests that build a PEX for our
    # local platform that are able to execute on a different platform, but for now in order to
    # guarantee correct build we need to restrict this command to execute on the same platform type
    # that the output is intended for. The correct way to interpret the keys
    # (execution_platform_constraint, target_platform_constraint) of this dictionary is "The output of
    # this command is intended for `target_platform_constraint` iff it is run on `execution_platform
    # constraint`".
    description = request.description
    if description is None:
        if request.requirements.requirements:
            description = f"Resolving {', '.join(request.requirements.requirements)}"
        else:
            description = f"Building PEX"
    execute_process_request = MultiPlatformExecuteProcessRequest({
        (
            PlatformConstraint(platform.value),
            PlatformConstraint(platform.value),
        ):
        pex_bin.create_execute_request(
            python_setup=python_setup,
            subprocess_encoding_environment=subprocess_encoding_environment,
            pex_build_environment=pex_build_environment,
            pex_args=argv,
            input_files=merged_digest,
            description=description,
            output_files=(request.output_filename, ),
        )
    })

    result = await Get[ExecuteProcessResult](
        MultiPlatformExecuteProcessRequest, execute_process_request)

    if pex_debug.might_log:
        lines = result.stderr.decode().splitlines()
        if lines:
            pex_debug.log(
                f"Debug output from Pex for: {execute_process_request}")
            for line in lines:
                pex_debug.log(line)

    return Pex(directory_digest=result.output_directory_digest,
               output_filename=request.output_filename)
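
The `--no-pypi` reasoning in the NB comment is easiest to see with concrete values: PyPI is always disabled, then every configured index is passed back explicitly, so the default index list (which includes PyPI) re-enables it, while a custom-only list genuinely excludes it. A sketch with hypothetical URLs:

# Sketch only; mirrors the index/repo flag construction in the rule above.
def index_args(indexes, repos):
    return ["--no-pypi",
            *(f"--index={index}" for index in indexes),
            *(f"--repo={repo}" for repo in repos)]

# Default-style config: PyPI comes back in via an explicit --index flag.
assert index_args(["https://pypi.org/simple/"], []) == [
    "--no-pypi", "--index=https://pypi.org/simple/",
]
# Custom-only config: PyPI stays disabled.
assert index_args(["https://example.com/simple/"], []) == [
    "--no-pypi", "--index=https://example.com/simple/",
]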