Example #1: a BUILD-file macro that parses a requirements file, groups the requirements by project name, and creates one `python_requirement_library` target per project.
    def __call__(
        self,
        requirements_relpath: str = "requirements.txt",
        *,
        module_mapping: Optional[Mapping[str, Iterable[str]]] = None,
        type_stubs_module_mapping: Optional[Mapping[str, Iterable[str]]] = None,
    ) -> None:
        """
        :param requirements_relpath: The relpath from this BUILD file to the requirements file.
            Defaults to a `requirements.txt` file sibling to the BUILD file.
        :param module_mapping: a mapping of requirement names to a list of the modules they provide.
            For example, `{"ansicolors": ["colors"]}`. Any unspecified requirements will use the
            requirement name as the default module, e.g. "Django" will default to
            `modules=["django"]`.
        """
        req_file_tgt = self._parse_context.create_object(
            "_python_requirements_file",
            name=requirements_relpath.replace(os.path.sep, "_"),
            sources=[requirements_relpath],
        )
        requirements_dep = f":{req_file_tgt.name}"

        normalized_module_mapping = normalize_module_mapping(module_mapping)
        normalized_type_stubs_module_mapping = normalize_module_mapping(type_stubs_module_mapping)

        req_file = Path(get_buildroot(), self._parse_context.rel_path, requirements_relpath)
        requirements = parse_requirements_file(
            req_file.read_text(), rel_path=str(req_file.relative_to(get_buildroot()))
        )

        grouped_requirements = groupby(requirements, lambda parsed_req: parsed_req.project_name)

        for project_name, parsed_reqs_ in grouped_requirements:
            normalized_proj_name = canonicalize_project_name(project_name)
            req_module_mapping = (
                {normalized_proj_name: normalized_module_mapping[normalized_proj_name]}
                if normalized_proj_name in normalized_module_mapping
                else {}
            )
            req_stubs_mapping = (
                {normalized_proj_name: normalized_type_stubs_module_mapping[normalized_proj_name]}
                if normalized_proj_name in normalized_type_stubs_module_mapping
                else {}
            )
            self._parse_context.create_object(
                "python_requirement_library",
                name=project_name,
                requirements=list(parsed_reqs_),
                module_mapping=req_module_mapping,
                type_stubs_module_mapping=req_stubs_mapping,
                dependencies=[requirements_dep],
            )
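For context, a hedged sketch of how this macro might be invoked from a BUILD file. The argument values are illustrative assumptions; `module_mapping` follows the docstring's own example, and the alias `python_requirements` is the one named via `macro_name` in Example #2 below.

# Hypothetical BUILD-file invocation of the macro sketched above.
python_requirements(
    requirements_relpath="requirements.txt",
    module_mapping={"ansicolors": ["colors"]},
)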
Example #2: a variant of the macro (alias `python_requirements`): the file parameter is renamed to `source`, an `overrides` parameter is added, and it generates `python_requirement` targets.
    def __call__(
        self,
        *,
        source: str = "requirements.txt",
        module_mapping: Mapping[str, Iterable[str]] | None = None,
        type_stubs_module_mapping: Mapping[str, Iterable[str]] | None = None,
        overrides: OVERRIDES_TYPE = None,
    ) -> None:
        """
        :param module_mapping: a mapping of requirement names to a list of the modules they provide.
            For example, `{"ansicolors": ["colors"]}`. Any unspecified requirements will use the
            requirement name as the default module, e.g. "Django" will default to
            `modules=["django"]`.
        """
        req_file_tgt = self._parse_context.create_object(
            "_python_requirements_file",
            name=source.replace(os.path.sep, "_"),
            sources=[source],
        )
        requirements_dep = f":{req_file_tgt.name}"

        normalized_module_mapping = normalize_module_mapping(module_mapping)
        normalized_type_stubs_module_mapping = normalize_module_mapping(type_stubs_module_mapping)

        req_file = Path(get_buildroot(), self._parse_context.rel_path, source)
        requirements = parse_requirements_file(
            req_file.read_text(), rel_path=str(req_file.relative_to(get_buildroot()))
        )

        dependencies_overrides = flatten_overrides_to_dependency_field(
            overrides,
            macro_name="python_requirements",
            build_file_dir=self._parse_context.rel_path,
        )
        grouped_requirements = groupby(requirements, lambda parsed_req: parsed_req.project_name)

        for project_name, parsed_reqs_ in grouped_requirements:
            normalized_proj_name = canonicalize_project_name(project_name)
            self._parse_context.create_object(
                "python_requirement",
                name=project_name,
                requirements=list(parsed_reqs_),
                modules=normalized_module_mapping.get(normalized_proj_name),
                type_stub_modules=normalized_type_stubs_module_mapping.get(normalized_proj_name),
                dependencies=[
                    requirements_dep,
                    *dependencies_overrides.get(normalized_proj_name, []),
                ],
            )
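The newer signature also accepts `overrides`. A hedged sketch of its use; the exact shape is an assumption, inferred from `flatten_overrides_to_dependency_field` above, which flattens per-requirement overrides into extra `dependencies` on the matching generated `python_requirement` target.

# Hypothetical: give the generated `ansicolors` target an extra dependency.
python_requirements(
    source="requirements.txt",
    overrides={"ansicolors": {"dependencies": ["path/to:some_target"]}},
)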
Example #3: a unit test of `parse_requirements_file`, asserting that comments and pip options are skipped while requirements are parsed with `Requirement.parse`.
def test_parse_requirements_file() -> None:
    content = dedent("""\
        # Comment.
        --find-links=https://duckduckgo.com
        ansicolors>=1.18.0
        Django==3.2 ; python_version>'3'
        Un-Normalized-PROJECT  # Inline comment.
        pip@ git+https://github.com/pypa/pip.git
        """)
    assert set(parse_requirements_file(content, rel_path="foo.txt")) == {
        Requirement.parse("ansicolors>=1.18.0"),
        Requirement.parse("Django==3.2 ; python_version>'3'"),
        Requirement.parse("Un-Normalized-PROJECT"),
        Requirement.parse("pip@ git+https://github.com/pypa/pip.git"),
    }
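The contract this test pins down can be summarized with a minimal sketch. This is not Pants' actual `parse_requirements_file`, only an illustration that reproduces the expected set above: comments and pip options are skipped, everything else is parsed as a requirement. The naive `#` handling would break on URLs containing fragments, which a real parser must treat more carefully.

from typing import Iterator

from pkg_resources import Requirement


def parse_requirements_sketch(content: str) -> Iterator[Requirement]:
    for raw_line in content.splitlines():
        # Naive comment stripping; do not use on URLs that contain "#".
        line = raw_line.partition("#")[0].strip()
        if not line or line.startswith("-"):  # pip options, e.g. --find-links
            continue
        yield Requirement.parse(line)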
Example #4: a rule that reads the file named by `[python].requirement_constraints` and parses it into `GlobalRequirementConstraints`.
async def determine_global_requirement_constraints(
    python_setup: PythonSetup,
) -> GlobalRequirementConstraints:
    if not python_setup.requirement_constraints:
        return GlobalRequirementConstraints()

    constraints_file_contents = await Get(
        DigestContents,
        PathGlobs(
            [python_setup.requirement_constraints],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `[python].requirement_constraints`",
        ),
    )

    return GlobalRequirementConstraints(
        parse_requirements_file(
            constraints_file_contents[0].content.decode(),
            rel_path=constraints_file_contents[0].path,
        )
    )
Example #5: a simpler variant of the macro: one `python_requirement_library` target per parsed requirement, with no grouping by project and no type-stub mapping.
    def __call__(
        self,
        requirements_relpath: str = "requirements.txt",
        *,
        module_mapping: Optional[Mapping[str, Iterable[str]]] = None,
    ) -> None:
        """
        :param requirements_relpath: The relpath from this BUILD file to the requirements file.
            Defaults to a `requirements.txt` file sibling to the BUILD file.
        :param module_mapping: a mapping of requirement names to a list of the modules they provide.
            For example, `{"ansicolors": ["colors"]}`. Any unspecified requirements will use the
            requirement name as the default module, e.g. "Django" will default to
            `modules=["django"]`.
        """
        req_file_tgt = self._parse_context.create_object(
            "_python_requirements_file",
            name=requirements_relpath.replace(os.path.sep, "_"),
            sources=[requirements_relpath],
        )
        requirements_dep = f":{req_file_tgt.name}"

        req_file = Path(get_buildroot(), self._parse_context.rel_path, requirements_relpath)
        requirements = parse_requirements_file(
            req_file.read_text(), rel_path=str(req_file.relative_to(get_buildroot()))
        )
        for parsed_req in requirements:
            req_module_mapping = (
                {parsed_req.project_name: module_mapping[parsed_req.project_name]}
                if module_mapping and parsed_req.project_name in module_mapping
                else None
            )
            self._parse_context.create_object(
                "python_requirement_library",
                name=parsed_req.project_name,
                requirements=[parsed_req],
                module_mapping=req_module_mapping,
                dependencies=[requirements_dep],
            )
Example #6: an extended variant of the parser test using `PipRequirement`; it additionally covers `-r` includes, backslash line continuations, and `--hash` options.
def test_parse_requirements_file() -> None:
    content = dedent(
        r"""\
        # Comment.
        --find-links=https://duckduckgo.com
        -r more_reqs.txt
        ansicolors>=1.18.0
        Django==3.2 ; python_version>'3'
        Un-Normalized-PROJECT  # Inline comment.
        pip@ git+https://github.com/pypa/pip.git
        setuptools==54.1.2; python_version >= "3.6" \
            --hash=sha256:dd20743f36b93cbb8724f4d2ccd970dce8b6e6e823a13aa7e5751bb4e674c20b \
            --hash=sha256:ebd0148faf627b569c8d2a1b20f5d3b09c873f12739d71c7ee88f037d5be82ff
        wheel==1.2.3 --hash=sha256:dd20743f36b93cbb8724f4d2ccd970dce8b6e6e823a13aa7e5751bb4e674c20b
        """
    )
    assert set(parse_requirements_file(content, rel_path="foo.txt")) == {
        PipRequirement.parse("ansicolors>=1.18.0"),
        PipRequirement.parse("Django==3.2 ; python_version>'3'"),
        PipRequirement.parse("Un-Normalized-PROJECT"),
        PipRequirement.parse("pip@ git+https://github.com/pypa/pip.git"),
        PipRequirement.parse("setuptools==54.1.2; python_version >= '3.6'"),
        PipRequirement.parse("wheel==1.2.3"),
    }
Example #7: a `pex_from_targets` rule that warns about requirements missing from the constraints file and, when `resolve_all_constraints` applies, resolves a `repository.pex` from the constraints plus the known URL requirements.
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints
        )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest, constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            )
        )

        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names

        url_reqs = set()  # E.g., 'foobar@ git+https://github.com/foo/bar.git@branch'
        name_reqs = set()  # E.g., foobar>=1.2.3
        name_req_projects = set()

        for req_str in exact_reqs:
            req = Requirement.parse(req_str)
            if req.url:  # type: ignore[attr-defined]
                url_reqs.add(req)
            else:
                name_reqs.add(req)
                name_req_projects.add(canonicalize_project_name(req.project_name))

        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        # Constraints files must only contain name reqs, not URL reqs (those are already
        # constrained by their very nature). See https://github.com/pypa/pip/issues/8210.
        unconstrained_projects = name_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints
                else f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints:
            if unconstrained_projects:
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements."
                )
            else:
                # To get a full set of requirements we must add the URL requirements to the
                # constraints file, since the latter cannot contain URL requirements.
                # NB: We can only add the URL requirements we know about here, i.e., those that
                #  are transitive deps of the targets in play. There may be others in the repo.
                #  So we may end up creating a few different repository pexes, each with identical
                #  name requirements but different subsets of URL requirements. Fortunately since
                #  all these repository pexes will have identical pinned versions of everything,
                #  this is not a correctness issue, only a performance one.
                # TODO: Address this as part of providing proper lockfile support. However we
                #  generate lockfiles, they must be able to include URL requirements.
                all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(all_constraints),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                        additional_args=["-vvv"],
                    ),
                )
    elif (
        python_setup.resolve_all_constraints
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        raise ValueError(
            "[python-setup].resolve_all_constraints is enabled, so either "
            "[python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )
Example #8: a target generator that creates one `python_requirement` target per project from a requirements file, applying per-project `overrides` and inherited fields.
async def generate_from_python_requirement(
    request: GenerateFromPythonRequirementsRequest, python_setup: PythonSetup
) -> GeneratedTargets:
    generator = request.generator
    requirements_rel_path = generator[PythonRequirementsSourceField].value
    requirements_full_path = generator[PythonRequirementsSourceField].file_path
    overrides = {
        canonicalize_project_name(k): v
        for k, v in request.require_unparametrized_overrides().items()
    }

    file_tgt = TargetGeneratorSourcesHelperTarget(
        {TargetGeneratorSourcesHelperSourcesField.alias: [requirements_rel_path]},
        Address(
            generator.address.spec_path,
            target_name=generator.address.target_name,
            relative_file_path=requirements_rel_path,
        ),
    )

    digest_contents = await Get(
        DigestContents,
        PathGlobs(
            [requirements_full_path],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=f"{generator}'s field `{PythonRequirementsSourceField.alias}`",
        ),
    )
    requirements = parse_requirements_file(
        digest_contents[0].content.decode(), rel_path=requirements_full_path
    )
    grouped_requirements = itertools.groupby(
        requirements, lambda parsed_req: parsed_req.project_name
    )

    # Validate the resolve is legal.
    generator[PythonRequirementResolveField].normalized_value(python_setup)

    module_mapping = generator[ModuleMappingField].value
    stubs_mapping = generator[TypeStubsModuleMappingField].value
    inherited_fields = {
        field.alias: field.value
        for field in request.generator.field_values.values()
        if isinstance(field, (*COMMON_TARGET_FIELDS, PythonRequirementResolveField))
    }

    def generate_tgt(
        project_name: str, parsed_reqs: Iterable[PipRequirement]
    ) -> PythonRequirementTarget:
        normalized_proj_name = canonicalize_project_name(project_name)
        tgt_overrides = overrides.pop(normalized_proj_name, {})
        if Dependencies.alias in tgt_overrides:
            tgt_overrides[Dependencies.alias] = list(tgt_overrides[Dependencies.alias]) + [
                file_tgt.address.spec
            ]

        return PythonRequirementTarget(
            {
                **inherited_fields,
                PythonRequirementsField.alias: list(parsed_reqs),
                PythonRequirementModulesField.alias: module_mapping.get(normalized_proj_name),
                PythonRequirementTypeStubModulesField.alias: stubs_mapping.get(
                    normalized_proj_name
                ),
                # This may get overridden by `tgt_overrides`, which will have already added in
                # the file tgt.
                Dependencies.alias: [file_tgt.address.spec],
                **tgt_overrides,
            },
            generator.address.create_generated(project_name),
        )

    result = tuple(
        generate_tgt(project_name, parsed_reqs_)
        for project_name, parsed_reqs_ in grouped_requirements
    ) + (file_tgt,)

    if overrides:
        raise InvalidFieldException(
            f"Unused key in the `overrides` field for {request.generator.address}: "
            f"{sorted(overrides)}"
        )

    return GeneratedTargets(generator, result)
Example #9: a minimal validation hook for requirements files.
def validate_python_requirements(path: str, contents: bytes) -> None:
    for _ in parse_requirements_file(contents.decode(), rel_path=path):
        pass
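Note the intentionally empty loop body: `parse_requirements_file` is lazy, so draining the generator is what forces each line to be parsed. A hedged usage sketch (the malformed input and the exact exception raised are illustrative assumptions):

# Hypothetical: a malformed requirement line should raise while the
# generator is drained inside validate_python_requirements.
validate_python_requirements("reqs.txt", b"ansicolors >= = 1.18")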
Example #10: a `pex_from_targets` variant driven by `ResolveAllConstraintsOption`; here the constraints replace the requirements directly instead of producing a repository pex.
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints
        )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    description = request.description

    if python_setup.requirement_constraints:
        # In requirement strings Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names

        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name) for req in exact_reqs
        }
        constraints_file_contents = await Get(
            DigestContents,
            PathGlobs(
                [python_setup.requirement_constraints],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin="the option `--python-setup-requirement-constraints`",
            ),
        )
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=python_setup.requirement_constraints,
            )
        )
        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            logger.warning(
                f"The constraints file {python_setup.requirement_constraints} does not contain "
                f"entries for the following requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
            python_setup.resolve_all_constraints == ResolveAllConstraintsOption.NONDEPLOYABLES
            and request.internal_only
        ):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring resolve_all_constraints setting in [python_setup] scope "
                    "because constraints file does not cover all requirements."
                )
            else:
                requirements = PexRequirements(str(req) for req in constraints_file_reqs)
                description = description or f"Resolving {python_setup.requirement_constraints}"
    elif (
        python_setup.resolve_all_constraints != ResolveAllConstraintsOption.NEVER
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        raise ValueError(
            f"[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            f"[python-setup].requirement_constraints must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=description,
    )
Example #11: another `pex_from_targets` variant that resolves a `repository.pex` and distinguishes a constraints file from a `_python_constraints` target.
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints
        )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest, constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            )
        )

        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name) for req in exact_reqs
        }
        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints
                else f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
            python_setup.resolve_all_constraints == ResolveAllConstraintsOption.NONDEPLOYABLES
            and request.internal_only
        ):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements."
                )
            else:
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(str(req) for req in constraints_file_reqs),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                    ),
                )
    elif (
        python_setup.resolve_all_constraints != ResolveAllConstraintsOption.NEVER
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        raise ValueError(
            "[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            "either [python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )
Example #12: the repository-pex logic factored into `_setup_constraints_repository_pex`; it bails out when platforms are in use or the constraints file does not cover all requirements.
async def _setup_constraints_repository_pex(
    request: _ConstraintsRepositoryPexRequest, python_setup: PythonSetup
) -> _ConstraintsRepositoryPex:
    # NB: it isn't safe to resolve against the whole constraints file if
    # platforms are in use. See https://github.com/pantsbuild/pants/issues/12222.
    if not python_setup.resolve_all_constraints or request.platforms:
        return _ConstraintsRepositoryPex(None)

    constraints_path = python_setup.requirement_constraints
    assert constraints_path is not None

    constraints_file_contents = await Get(
        DigestContents,
        PathGlobs(
            [constraints_path],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `[python].requirement_constraints`",
        ),
    )
    constraints_file_reqs = set(
        parse_requirements_file(
            constraints_file_contents[0].content.decode(), rel_path=constraints_path
        )
    )

    # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
    # packaging canonicalize for us.
    # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
    url_reqs = set()  # E.g., 'foobar@ git+https://github.com/foo/bar.git@branch'
    name_reqs = set()  # E.g., foobar>=1.2.3
    name_req_projects = set()

    for req_str in request.requirements.req_strings:
        req = PipRequirement.parse(req_str)
        if req.url:
            url_reqs.add(req)
        else:
            name_reqs.add(req)
            name_req_projects.add(canonicalize_project_name(req.project_name))

    constraint_file_projects = {
        canonicalize_project_name(req.project_name) for req in constraints_file_reqs
    }
    # Constraints files must only contain name reqs, not URL reqs (those are already
    # constrained by their very nature). See https://github.com/pypa/pip/issues/8210.
    unconstrained_projects = name_req_projects - constraint_file_projects
    if unconstrained_projects:
        logger.warning(
            f"The constraints file {constraints_path} does not contain "
            f"entries for the following requirements: {', '.join(unconstrained_projects)}.\n\n"
            f"Ignoring `[python_setup].resolve_all_constraints` option."
        )
        return _ConstraintsRepositoryPex(None)

    # To get a full set of requirements we must add the URL requirements to the
    # constraints file, since the latter cannot contain URL requirements.
    # NB: We can only add the URL requirements we know about here, i.e., those that
    #  are transitive deps of the targets in play. There may be others in the repo.
    #  So we may end up creating a few different repository pexes, each with identical
    #  name requirements but different subsets of URL requirements. Fortunately since
    #  all these repository pexes will have identical pinned versions of everything,
    #  this is not a correctness issue, only a performance one.
    all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}
    repository_pex = await Get(
        Pex,
        PexRequest(
            description=f"Resolving {constraints_path}",
            output_filename="repository.pex",
            internal_only=request.internal_only,
            requirements=PexRequirements(
                all_constraints,
                apply_constraints=True,
                # TODO: See PexRequirements docs.
                is_all_constraints_resolve=True,
            ),
            interpreter_constraints=request.interpreter_constraints,
            platforms=request.platforms,
            additional_args=request.additional_lockfile_args,
        ),
    )
    return _ConstraintsRepositoryPex(repository_pex)
Example #13: a `load_lockfile` rule that distinguishes pex-native JSON lockfiles from requirements.txt-style ones, using `parse_requirements_file` to derive constraint strings for the latter.
async def load_lockfile(
    request: LoadedLockfileRequest,
    python_setup: PythonSetup,
) -> LoadedLockfile:
    lockfile = request.lockfile
    if isinstance(lockfile, Lockfile):
        synthetic_lock = False
        lockfile_path = lockfile.file_path
        lockfile_digest = await Get(
            Digest,
            PathGlobs(
                [lockfile_path],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                description_of_origin=lockfile.file_path_description_of_origin,
            ),
        )
        _digest_contents = await Get(DigestContents, Digest, lockfile_digest)
        lock_bytes = _digest_contents[0].content
    else:
        synthetic_lock = True
        _fc = lockfile.file_content
        lockfile_path, lock_bytes = (_fc.path, _fc.content)
        lockfile_digest = await Get(Digest, CreateDigest([_fc]))

    is_pex_native = is_probably_pex_json_lockfile(lock_bytes)
    if is_pex_native:
        header_delimiter = "//"
        lockfile_digest = await Get(
            Digest,
            CreateDigest(
                [FileContent(lockfile_path, _strip_comments_from_pex_json_lockfile(lock_bytes))]
            ),
        )
        requirement_estimate = _pex_lockfile_requirement_count(lock_bytes)
        constraints_strings = None
    else:
        header_delimiter = "#"
        lock_string = lock_bytes.decode()
        # Note: this is a very naive heuristic. It will overcount because entries often
        # have >1 line due to `--hash`.
        requirement_estimate = len(lock_string.splitlines())
        constraints_strings = FrozenOrderedSet(
            str(req) for req in parse_requirements_file(lock_string, rel_path=lockfile_path)
        )

    metadata: PythonLockfileMetadata | None = None
    if should_validate_metadata(lockfile, python_setup):
        metadata = PythonLockfileMetadata.from_lockfile(
            lock_bytes,
            **(dict() if synthetic_lock else dict(lockfile_path=lockfile_path)),
            resolve_name=lockfile.resolve_name,
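            # NB: "delimeter" (sic) appears to be the keyword argument's actual spelling here.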
            delimeter=header_delimiter,
        )

    return LoadedLockfile(
        lockfile_digest,
        lockfile_path,
        metadata,
        requirement_estimate,
        is_pex_native,
        constraints_strings,
        original_lockfile=lockfile,
    )