Example 1
0
def test_default_module_mapping_is_normalized() -> None:
    """Check that the default module-mapping dicts key on canonical project names."""
    for project in DEFAULT_MODULE_MAPPING:
        assert (
            canonicalize_project_name(project) == project
        ), "Please update `DEFAULT_MODULE_MAPPING` to use canonical project names"
    for project in DEFAULT_TYPE_STUB_MODULE_MAPPING:
        assert (
            canonicalize_project_name(project) == project
        ), "Please update `DEFAULT_TYPE_STUB_MODULE_MAPPING` to use canonical project names"
Example 2
0
def normalize_module_mapping(
    mapping: Mapping[str, Iterable[str]] | None
) -> FrozenDict[str, tuple[str, ...]]:
    """Return an immutable copy of `mapping` with canonicalized keys and tuple values.

    A `None` mapping is treated as empty.
    """
    normalized: dict[str, tuple[str, ...]] = {}
    for project, modules in (mapping or {}).items():
        normalized[canonicalize_project_name(project)] = tuple(modules)
    return FrozenDict(normalized)
Example 3
0
    def generate_tgt(
        project_name: str, parsed_reqs: Iterable[PipRequirement]
    ) -> PythonRequirementTarget:
        """Create one generated `python_requirement` target for a project's requirements."""
        normalized = canonicalize_project_name(project_name)
        # NB: `pop` (not `get`) so leftover, unmatched overrides remain detectable by the caller.
        override_fields = overrides.pop(normalized, {})
        if Dependencies.alias in override_fields:
            # Keep the dependency on the generating file target even when deps are overridden.
            override_fields[Dependencies.alias] = [
                *override_fields[Dependencies.alias],
                file_tgt.address.spec,
            ]

        field_values = {
            **inherited_fields,
            PythonRequirementsField.alias: list(parsed_reqs),
            PythonRequirementModulesField.alias: module_mapping.get(normalized),
            PythonRequirementTypeStubModulesField.alias: stubs_mapping.get(normalized),
            # May be replaced by `override_fields`, which already includes the file tgt.
            Dependencies.alias: [file_tgt.address.spec],
            **override_fields,
        }
        return PythonRequirementTarget(
            field_values, generator.address.create_generated(project_name)
        )
Example 4
0
    def generate_tgt(raw_req: str, info: dict) -> PythonRequirementTarget:
        """Assemble a requirement string from Pipfile-lock `info` and create its target."""
        extras = info.get("extras")
        if extras:
            raw_req += f"[{','.join(extras)}]"
        raw_req += info.get("version", "")
        markers = info.get("markers")
        if markers:
            raw_req += f";{markers}"

        parsed_req = PipRequirement.parse(raw_req)
        normalized = canonicalize_project_name(parsed_req.project_name)
        # NB: `pop` (not `get`) so leftover, unmatched overrides remain detectable by the caller.
        override_fields = overrides.pop(normalized, {})
        if Dependencies.alias in override_fields:
            # Keep the dependency on the generating file target even when deps are overridden.
            override_fields[Dependencies.alias] = [
                *override_fields[Dependencies.alias],
                file_tgt.address.spec,
            ]

        field_values = {
            **inherited_fields,
            PythonRequirementsField.alias: [parsed_req],
            PythonRequirementModulesField.alias: module_mapping.get(normalized),
            PythonRequirementTypeStubModulesField.alias: stubs_mapping.get(normalized),
            # May be replaced by `override_fields`, which already includes the file tgt.
            Dependencies.alias: [file_tgt.address.spec],
            **override_fields,
        }
        return PythonRequirementTarget(
            field_values, generator.address.create_generated(parsed_req.project_name)
        )
Example 5
0
    def __call__(
        self,
        requirements_relpath: str = "requirements.txt",
        *,
        module_mapping: Optional[Mapping[str, Iterable[str]]] = None,
        type_stubs_module_mapping: Optional[Mapping[str, Iterable[str]]] = None,
    ) -> None:
        """Generate a `python_requirement_library` per project in a requirements file.

        :param requirements_relpath: The relpath from this BUILD file to the requirements file.
            Defaults to a `requirements.txt` file sibling to the BUILD file.
        :param module_mapping: a mapping of requirement names to a list of the modules they provide.
            For example, `{"ansicolors": ["colors"]}`. Any unspecified requirements will use the
            requirement name as the default module, e.g. "Django" will default to
            `modules=["django"]`.
        """
        # One `_python_requirements_file` target represents the requirements file itself.
        file_tgt = self._parse_context.create_object(
            "_python_requirements_file",
            name=requirements_relpath.replace(os.path.sep, "_"),
            sources=[requirements_relpath],
        )
        file_dep = f":{file_tgt.name}"

        canonical_modules = normalize_module_mapping(module_mapping)
        canonical_stubs = normalize_module_mapping(type_stubs_module_mapping)

        requirements_path = Path(
            get_buildroot(), self._parse_context.rel_path, requirements_relpath
        )
        parsed = parse_requirements_file(
            requirements_path.read_text(),
            rel_path=str(requirements_path.relative_to(get_buildroot())),
        )

        # All requirements for the same project collapse into a single target.
        for project_name, project_reqs in groupby(parsed, lambda req: req.project_name):
            normalized = canonicalize_project_name(project_name)
            modules_subset = (
                {normalized: canonical_modules[normalized]}
                if normalized in canonical_modules
                else {}
            )
            stubs_subset = (
                {normalized: canonical_stubs[normalized]}
                if normalized in canonical_stubs
                else {}
            )
            self._parse_context.create_object(
                "python_requirement_library",
                name=project_name,
                requirements=list(project_reqs),
                module_mapping=modules_subset,
                type_stubs_module_mapping=stubs_subset,
                dependencies=[file_dep],
            )
Example 6
0
    def __call__(
        self,
        *,
        source: str = "requirements.txt",
        module_mapping: Mapping[str, Iterable[str]] | None = None,
        type_stubs_module_mapping: Mapping[str, Iterable[str]] | None = None,
        overrides: OVERRIDES_TYPE = None,
    ) -> None:
        """Generate a `python_requirement` per project in a requirements file.

        :param module_mapping: a mapping of requirement names to a list of the modules they provide.
            For example, `{"ansicolors": ["colors"]}`. Any unspecified requirements will use the
            requirement name as the default module, e.g. "Django" will default to
            `modules=["django"]`.
        """
        # One `_python_requirements_file` target represents the requirements file itself.
        file_tgt = self._parse_context.create_object(
            "_python_requirements_file",
            name=source.replace(os.path.sep, "_"),
            sources=[source],
        )
        file_dep = f":{file_tgt.name}"

        canonical_modules = normalize_module_mapping(module_mapping)
        canonical_stubs = normalize_module_mapping(type_stubs_module_mapping)

        requirements_path = Path(get_buildroot(), self._parse_context.rel_path, source)
        parsed = parse_requirements_file(
            requirements_path.read_text(),
            rel_path=str(requirements_path.relative_to(get_buildroot())),
        )

        extra_deps_by_project = flatten_overrides_to_dependency_field(
            overrides,
            macro_name="python_requirements",
            build_file_dir=self._parse_context.rel_path,
        )

        # All requirements for the same project collapse into a single target.
        for project_name, project_reqs in groupby(parsed, lambda req: req.project_name):
            normalized = canonicalize_project_name(project_name)
            self._parse_context.create_object(
                "python_requirement",
                name=project_name,
                requirements=list(project_reqs),
                modules=canonical_modules.get(normalized),
                type_stub_modules=canonical_stubs.get(normalized),
                dependencies=[file_dep, *extra_deps_by_project.get(normalized, [])],
            )
Example 7
0
    def __call__(
        self,
        pyproject_toml_relpath: str = "pyproject.toml",
        *,
        module_mapping: Optional[Mapping[str, Iterable[str]]] = None,
        type_stubs_module_mapping: Optional[Mapping[str, Iterable[str]]] = None,
    ) -> None:
        """Generate a `python_requirement_library` per requirement in a pyproject.toml.

        :param pyproject_toml_relpath: The relpath from this BUILD file to the pyproject.toml
            file. Defaults to a `pyproject.toml` file sibling to the BUILD file.
        :param module_mapping: a mapping of requirement names to a list of the modules they provide.
            For example, `{"ansicolors": ["colors"]}`. Any unspecified requirements will use the
            requirement name as the default module, e.g. "Django" will default to
            `modules=["django"]`.
        """
        # One `_python_requirements_file` target represents the pyproject.toml itself.
        file_tgt = self._parse_context.create_object(
            "_python_requirements_file",
            name=pyproject_toml_relpath.replace(os.path.sep, "_"),
            sources=[pyproject_toml_relpath],
        )
        file_dep = f":{file_tgt.name}"

        canonical_modules = normalize_module_mapping(module_mapping)
        canonical_stubs = normalize_module_mapping(type_stubs_module_mapping)

        for parsed_req in parse_pyproject_toml(
            PyProjectToml.create(self._parse_context, pyproject_toml_relpath)
        ):
            normalized = canonicalize_project_name(parsed_req.project_name)
            modules_subset = (
                {normalized: canonical_modules[normalized]}
                if normalized in canonical_modules
                else {}
            )
            stubs_subset = (
                {normalized: canonical_stubs[normalized]}
                if normalized in canonical_stubs
                else {}
            )
            self._parse_context.create_object(
                "python_requirement_library",
                name=parsed_req.project_name,
                requirements=[parsed_req],
                module_mapping=modules_subset,
                type_stubs_module_mapping=stubs_subset,
                dependencies=[file_dep],
            )
    def __call__(
        self,
        *,
        source: str = "pyproject.toml",
        module_mapping: Mapping[str, Iterable[str]] | None = None,
        type_stubs_module_mapping: Mapping[str, Iterable[str]] | None = None,
        overrides: OVERRIDES_TYPE = None,
    ) -> None:
        """Generate a `python_requirement` per requirement in a pyproject.toml.

        :param module_mapping: a mapping of requirement names to a list of the modules they provide.
            For example, `{"ansicolors": ["colors"]}`. Any unspecified requirements will use the
            requirement name as the default module, e.g. "Django" will default to
            `modules=["django"]`.
        """
        # A helper target represents the pyproject.toml file itself.
        file_tgt = self._parse_context.create_object(
            TargetGeneratorSourcesHelperTarget.alias,
            name=source.replace(os.path.sep, "_"),
            sources=[source],
        )
        file_dep = f":{file_tgt.name}"

        canonical_modules = normalize_module_mapping(module_mapping)
        canonical_stubs = normalize_module_mapping(type_stubs_module_mapping)

        extra_deps_by_project = flatten_overrides_to_dependency_field(
            overrides,
            macro_name="python_requirements",
            build_file_dir=self._parse_context.rel_path,
        )

        for parsed_req in parse_pyproject_toml(
            PyProjectToml.deprecated_macro_create(self._parse_context, source)
        ):
            normalized = canonicalize_project_name(parsed_req.project_name)
            self._parse_context.create_object(
                "python_requirement",
                name=parsed_req.project_name,
                requirements=[parsed_req],
                modules=canonical_modules.get(normalized),
                type_stub_modules=canonical_stubs.get(normalized),
                dependencies=[file_dep, *extra_deps_by_project.get(normalized, [])],
            )
Example 9
0
def assert_pants_requirement(
    rule_runner: RuleRunner,
    build_file_entry: str,
    *,
    expected_target_name: str,
    expected_dist: str = "pantsbuild.pants",
    expected_module: str = "pants",
) -> None:
    """Add `build_file_entry` to a BUILD file and verify the resulting target's fields."""
    rule_runner.add_to_build_file("3rdparty/python", f"{build_file_entry}\n")
    address = Address("3rdparty/python", target_name=expected_target_name)
    target = rule_runner.get_target(address)
    assert isinstance(target, PythonRequirementLibrary)
    expected_req = Requirement.parse(f"{expected_dist}=={pants_version()}")
    assert target[PythonRequirementsField].value == (expected_req,)
    module_mapping = target[ModuleMappingField].value
    assert isinstance(module_mapping, FrozenDict)
    canonical_dist = canonicalize_project_name(expected_dist)
    assert module_mapping.get(canonical_dist) == (expected_module,)
Example 10
0
    def validate_pytest_cov_included(self) -> None:
        """Raise ValueError unless `pytest-cov` appears in `[pytest].extra_requirements`."""
        for requirement_string in self.extra_requirements:
            try:
                project_name = Requirement.parse(requirement_string).project_name
            except Exception as e:
                raise ValueError(
                    format_invalid_requirement_string_error(
                        requirement_string,
                        e,
                        description_of_origin="`[pytest].extra_requirements`",
                    )
                )
            # Compare canonical names so e.g. `Pytest_Cov` still matches.
            if canonicalize_project_name(project_name) == "pytest-cov":
                return

        raise ValueError(
            "You set `[test].use_coverage`, but `[pytest].extra_requirements` is missing "
            "`pytest-cov`, which is needed to collect coverage data.\n\nThis happens when "
            "overriding the `extra_requirements` option. Please either explicitly add back "
            "`pytest-cov` or use `extra_requirements.add` to keep Pants's default, rather than "
            "overriding it. Run `./pants help-advanced pytest` to see the default version of "
            f"`pytest-cov` and see {doc_url('options#list-values')} for more on adding vs. "
            "overriding list options."
        )
Example 11
0
    def validate_pytest_cov_included(self) -> None:
        """Raise ValueError unless `pytest-cov` appears in `[pytest].extra_requirements`."""
        for requirement_string in self.extra_requirements:
            try:
                project_name = PipRequirement.parse(requirement_string).project_name
            except Exception as e:
                raise ValueError(
                    f"Invalid requirement '{requirement_string}' in `[pytest].extra_requirements`: {e}"
                )
            # Compare canonical names so e.g. `Pytest_Cov` still matches.
            if canonicalize_project_name(project_name) == "pytest-cov":
                return

        raise ValueError(
            softwrap(f"""
                You set `[test].use_coverage`, but `[pytest].extra_requirements` is missing
                `pytest-cov`, which is needed to collect coverage data.

                This happens when overriding the `extra_requirements` option. Please either explicitly
                add back `pytest-cov` or use `extra_requirements.add` to keep Pants's default, rather than
                overriding it. Run `{bin_name()} help-advanced pytest` to see the default version of
                `pytest-cov` and see {doc_url('options#list-values')} for more on adding vs.
                overriding list options.
                """))
Example 12
0
    def generate_tgt(parsed_req: PipRequirement) -> PythonRequirementTarget:
        """Create one generated `python_requirement` target for a parsed requirement."""
        normalized = canonicalize_project_name(parsed_req.project_name)
        # NB: `pop` (not `get`) so leftover, unmatched overrides remain detectable by the caller.
        override_fields = overrides.pop(normalized, {})
        if Dependencies.alias in override_fields:
            # Keep the dependency on the generating file target even when deps are overridden.
            override_fields[Dependencies.alias] = [
                *override_fields[Dependencies.alias],
                file_tgt.address.spec,
            ]

        field_values = {
            **request.template,
            PythonRequirementsField.alias: [parsed_req],
            PythonRequirementModulesField.alias: module_mapping.get(normalized),
            PythonRequirementTypeStubModulesField.alias: stubs_mapping.get(normalized),
            # May be replaced by `override_fields`, which already includes the file tgt.
            Dependencies.alias: [file_tgt.address.spec],
            **override_fields,
        }
        return PythonRequirementTarget(
            field_values,
            request.template_address.create_generated(parsed_req.project_name),
            union_membership,
        )
Example 13
0
    def __call__(
        self,
        requirements_relpath: str = "Pipfile.lock",
        module_mapping: Optional[Mapping[str, Iterable[str]]] = None,
        type_stubs_module_mapping: Optional[Mapping[str, Iterable[str]]] = None,
        pipfile_target: Optional[str] = None,
    ) -> None:
        """Generate a `python_requirement_library` per entry in a Pipfile.lock.

        :param requirements_relpath: The relpath from this BUILD file to the requirements file.
            Defaults to a `Pipfile.lock` file sibling to the BUILD file.
        :param module_mapping: a mapping of requirement names to a list of the modules they provide.
            For example, `{"ansicolors": ["colors"]}`. Any unspecified requirements will use the
            requirement name as the default module, e.g. "Django" will default to
            `modules=["django"]`.
        :param pipfile_target: a `_python_requirements_file` target to provide for cache
            invalidation if the requirements_relpath value is not in the current rel_path.
        """
        lock_path = Path(
            get_buildroot(), self._parse_context.rel_path, requirements_relpath
        )
        lock_info = json.loads(lock_path.read_text())

        # Either reuse the caller-provided file target or create one for the lockfile.
        if pipfile_target:
            file_dep = pipfile_target
        else:
            file_tgt_name = requirements_relpath
            self._parse_context.create_object(
                "_python_requirements_file",
                name=file_tgt_name,
                sources=[requirements_relpath],
            )
            file_dep = f":{file_tgt_name}"

        canonical_modules = normalize_module_mapping(module_mapping)
        canonical_stubs = normalize_module_mapping(type_stubs_module_mapping)

        # Both the default and develop sections contribute requirements.
        all_reqs = {**lock_info.get("default", {}), **lock_info.get("develop", {})}
        for req_name, info in all_reqs.items():
            extras = list(info.get("extras", []))
            extras_str = f"[{','.join(extras)}]" if extras else ""
            req_str = f"{req_name}{extras_str}{info.get('version','')}"
            if info.get("markers"):
                req_str += f";{info['markers']}"

            parsed_req = Requirement.parse(req_str)
            normalized = canonicalize_project_name(parsed_req.project_name)
            modules_subset = (
                {normalized: canonical_modules[normalized]}
                if normalized in canonical_modules
                else {}
            )
            stubs_subset = (
                {normalized: canonical_stubs[normalized]}
                if normalized in canonical_stubs
                else {}
            )
            self._parse_context.create_object(
                "python_requirement_library",
                name=parsed_req.project_name,
                requirements=[parsed_req],
                dependencies=[file_dep],
                module_mapping=modules_subset,
                type_stubs_module_mapping=stubs_subset,
            )
Example 14
0
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Build a `PexRequest` covering the transitive closure of `request.addresses`.

    Gathers source files, interpreter constraints, and exact third-party requirements from
    the closure. If `[python-setup].requirement_constraints` is set, warns about requirements
    missing from the constraints file and, depending on `resolve_all_constraints`, may resolve
    the entire constraints file instead of only the exact requirements.
    """
    transitive_targets = await Get(TransitiveTargets, Addresses, request.addresses)
    all_targets = transitive_targets.closure

    # Collect the input digests: caller-supplied additional sources plus (optionally) the
    # stripped sources of every target in the closure.
    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    # Interpreter constraints come from every target in the closure that declares them.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )

    # The exact third-party requirements declared by targets in the closure.
    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    description = request.description

    if python_setup.requirement_constraints:
        # In requirement strings Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names

        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name) for req in exact_reqs
        }
        constraints_file_contents = await Get(
            DigestContents,
            PathGlobs(
                [python_setup.requirement_constraints],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin="the option `--python-setup-requirement-constraints`",
            ),
        )
        constraints_file_reqs = set(
            parse_requirements(next(iter(constraints_file_contents)).content.decode())
        )
        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        # Warn (don't fail) when the constraints file doesn't cover some exact requirement.
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            logger.warning(
                f"The constraints file {python_setup.requirement_constraints} does not contain "
                f"entries for the following requirements: {', '.join(unconstrained_projects)}"
            )

        # Optionally resolve the whole constraints file, but only when it covers every
        # exact requirement (otherwise the substitution would drop requirements).
        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
            python_setup.resolve_all_constraints == ResolveAllConstraintsOption.NONDEPLOYABLES
            and request.internal_only
        ):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring resolve_all_constraints setting in [python_setup] scope "
                    "because constraints file does not cover all requirements."
                )
            else:
                requirements = PexRequirements(str(req) for req in constraints_file_reqs)
                description = description or f"Resolving {python_setup.requirement_constraints}"
    elif (
        python_setup.resolve_all_constraints != ResolveAllConstraintsOption.NEVER
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        # An explicit `resolve_all_constraints` only makes sense alongside a constraints file.
        raise ValueError(
            f"[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            f"[python-setup].requirement_constraints must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=description,
    )
Example 15
0
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    """Build a `PexRequest` for `request.addresses` (transitively, or direct deps only).

    Gathers source files, interpreter constraints, and exact third-party requirements. When
    a constraints file exists, warns about uncovered name requirements and, if
    `resolve_all_constraints` is enabled and coverage is complete, first resolves a shared
    "repository" pex of all constraints (plus known URL requirements).
    """
    # Choose the target set: only direct dependencies, or the full transitive closure.
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    # Collect the input digests: caller-supplied additional sources plus (optionally) the
    # stripped sources of every target in play.
    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints
        )

    # The exact third-party requirements declared by targets in play.
    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest, constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            )
        )

        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names

        url_reqs = set()  # E.g., 'foobar@ git+https://github.com/foo/bar.git@branch'
        name_reqs = set()  # E.g., foobar>=1.2.3
        name_req_projects = set()

        # Split the exact requirements into URL requirements and name requirements.
        for req_str in exact_reqs:
            req = Requirement.parse(req_str)
            if req.url:  # type: ignore[attr-defined]
                url_reqs.add(req)
            else:
                name_reqs.add(req)
                name_req_projects.add(canonicalize_project_name(req.project_name))

        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        # Constraints files must only contain name reqs, not URL reqs (those are already
        # constrained by their very nature). See https://github.com/pypa/pip/issues/8210.
        unconstrained_projects = name_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints
                else f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints:
            if unconstrained_projects:
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements."
                )
            else:
                # To get a full set of requirements we must add the URL requirements to the
                # constraints file, since the latter cannot contain URL requirements.
                # NB: We can only add the URL requirements we know about here, i.e., those that
                #  are transitive deps of the targets in play. There may be others in the repo.
                #  So we may end up creating a few different repository pexes, each with identical
                #  name requirements but different subsets of URL requirements. Fortunately since
                #  all these repository pexes will have identical pinned versions of everything,
                #  this is not a correctness issue, only a performance one.
                # TODO: Address this as part of providing proper lockfile support. However we
                #  generate lockfiles, they must be able to include URL requirements.
                all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(all_constraints),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                        # NOTE(review): "-vvv" looks like leftover debug verbosity for the
                        # resolve — confirm it is intentional.
                        additional_args=["-vvv"],
                    ),
                )
    elif (
        python_setup.resolve_all_constraints
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        # An explicit `resolve_all_constraints` only makes sense alongside constraints.
        raise ValueError(
            "[python-setup].resolve_all_constraints is enabled, so either "
            "[python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )
Example 16
0
    def __call__(
        self,
        *,
        source: str = "Pipfile.lock",
        module_mapping: Mapping[str, Iterable[str]] | None = None,
        type_stubs_module_mapping: Mapping[str, Iterable[str]] | None = None,
        pipfile_target: str | None = None,
        overrides: OVERRIDES_TYPE = None,
    ) -> None:
        """Generate a `python_requirement` per entry in a Pipfile.lock.

        :param module_mapping: a mapping of requirement names to a list of the modules they provide.
            For example, `{"ansicolors": ["colors"]}`. Any unspecified requirements will use the
            requirement name as the default module, e.g. "Django" will default to
            `modules=["django"]`.
        :param pipfile_target: a `_python_requirements_file` target to provide for cache
            invalidation if the `source` value is not in the current rel_path.
        """
        lock_path = Path(get_buildroot(), self._parse_context.rel_path, source)
        lock_info = json.loads(lock_path.read_text())

        # Either reuse the caller-provided file target or create one for the lockfile.
        if pipfile_target:
            file_dep = pipfile_target
        else:
            file_tgt_name = source
            self._parse_context.create_object(
                "_python_requirements_file",
                name=file_tgt_name,
                sources=[source],
            )
            file_dep = f":{file_tgt_name}"

        canonical_modules = normalize_module_mapping(module_mapping)
        canonical_stubs = normalize_module_mapping(type_stubs_module_mapping)

        extra_deps_by_project = flatten_overrides_to_dependency_field(
            overrides,
            macro_name="python_requirements",
            build_file_dir=self._parse_context.rel_path,
        )

        # Both the default and develop sections contribute requirements.
        all_reqs = {**lock_info.get("default", {}), **lock_info.get("develop", {})}
        for req_name, info in all_reqs.items():
            extras = list(info.get("extras", []))
            extras_str = f"[{','.join(extras)}]" if extras else ""
            req_str = f"{req_name}{extras_str}{info.get('version','')}"
            if info.get("markers"):
                req_str += f";{info['markers']}"

            parsed_req = PipRequirement.parse(req_str)
            normalized = canonicalize_project_name(parsed_req.project_name)
            self._parse_context.create_object(
                "python_requirement",
                name=parsed_req.project_name,
                requirements=[parsed_req],
                dependencies=[file_dep, *extra_deps_by_project.get(normalized, [])],
                modules=canonical_modules.get(normalized),
                type_stub_modules=canonical_stubs.get(normalized),
            )
Example 17
0
async def generate_from_python_requirement(
    request: GenerateFromPythonRequirementsRequest, python_setup: PythonSetup
) -> GeneratedTargets:
    """Generate one `python_requirement` target per project in a requirements file.

    Entries for the same project (e.g. repeated with different environment
    markers) are collapsed into a single generated target. A helper target
    owning the requirements file itself is also generated, and every generated
    target depends on it so the file is materialized alongside the requirement.

    Raises:
        InvalidFieldException: if `overrides` contains a key that matches no
            requirement in the file.
    """
    generator = request.generator
    requirements_rel_path = generator[PythonRequirementsSourceField].value
    requirements_full_path = generator[PythonRequirementsSourceField].file_path
    # Canonicalize keys (PEP 503) so `Foo_-Bar.BAZ` and `foo-bar-baz` hit the
    # same override entry.
    overrides = {
        canonicalize_project_name(k): v
        for k, v in request.require_unparametrized_overrides().items()
    }

    file_tgt = TargetGeneratorSourcesHelperTarget(
        {TargetGeneratorSourcesHelperSourcesField.alias: [requirements_rel_path]},
        Address(
            generator.address.spec_path,
            target_name=generator.address.target_name,
            relative_file_path=requirements_rel_path,
        ),
    )

    digest_contents = await Get(
        DigestContents,
        PathGlobs(
            [requirements_full_path],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=f"{generator}'s field `{PythonRequirementsSourceField.alias}`",
        ),
    )
    requirements = parse_requirements_file(
        digest_contents[0].content.decode(), rel_path=requirements_full_path
    )
    # NB: `itertools.groupby` only merges *consecutive* items with equal keys,
    # so sort by project name first. Otherwise a project listed twice
    # non-adjacently would produce two generated targets with the same address.
    grouped_requirements = itertools.groupby(
        sorted(requirements, key=lambda parsed_req: parsed_req.project_name),
        lambda parsed_req: parsed_req.project_name,
    )

    # Validate the resolve is legal.
    generator[PythonRequirementResolveField].normalized_value(python_setup)

    module_mapping = generator[ModuleMappingField].value
    stubs_mapping = generator[TypeStubsModuleMappingField].value
    # Common fields (e.g. tags) and the resolve are inherited by every
    # generated target.
    inherited_fields = {
        field.alias: field.value
        for field in request.generator.field_values.values()
        if isinstance(field, (*COMMON_TARGET_FIELDS, PythonRequirementResolveField))
    }

    def generate_tgt(
        project_name: str, parsed_reqs: Iterable[PipRequirement]
    ) -> PythonRequirementTarget:
        """Create the generated target for one project, applying any override."""
        normalized_proj_name = canonicalize_project_name(project_name)
        tgt_overrides = overrides.pop(normalized_proj_name, {})
        if Dependencies.alias in tgt_overrides:
            tgt_overrides[Dependencies.alias] = list(tgt_overrides[Dependencies.alias]) + [
                file_tgt.address.spec
            ]

        return PythonRequirementTarget(
            {
                **inherited_fields,
                PythonRequirementsField.alias: list(parsed_reqs),
                PythonRequirementModulesField.alias: module_mapping.get(normalized_proj_name),
                PythonRequirementTypeStubModulesField.alias: stubs_mapping.get(
                    normalized_proj_name
                ),
                # This may get overridden by `tgt_overrides`, which will have already added in
                # the file tgt.
                Dependencies.alias: [file_tgt.address.spec],
                **tgt_overrides,
            },
            generator.address.create_generated(project_name),
        )

    result = tuple(
        generate_tgt(project_name, parsed_reqs_)
        for project_name, parsed_reqs_ in grouped_requirements
    ) + (file_tgt,)

    # `overrides.pop` above consumed every key that matched a requirement;
    # anything left over is a typo or a stale entry.
    if overrides:
        raise InvalidFieldException(
            f"Unused key in the `overrides` field for {request.generator.address}: "
            f"{sorted(overrides)}"
        )

    return GeneratedTargets(generator, result)
Esempio n. 18
0
async def _setup_constraints_repository_pex(
    request: _ConstraintsRepositoryPexRequest, python_setup: PythonSetup
) -> _ConstraintsRepositoryPex:
    """Resolve the entire constraints file into a reusable repository PEX.

    Returns a wrapper around `None` (i.e. no repository PEX) when the
    optimization is disabled, when platforms are in use, or when the
    constraints file does not cover every name requirement in play.
    """
    # NB: it isn't safe to resolve against the whole constraints file if
    # platforms are in use. See https://github.com/pantsbuild/pants/issues/12222.
    if not python_setup.resolve_all_constraints or request.platforms:
        return _ConstraintsRepositoryPex(None)

    constraints_path = python_setup.requirement_constraints
    assert constraints_path is not None

    constraints_file_contents = await Get(
        DigestContents,
        PathGlobs(
            [constraints_path],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `[python].requirement_constraints`",
        ),
    )
    constraints_file_reqs = set(
        parse_requirements_file(
            constraints_file_contents[0].content.decode(), rel_path=constraints_path
        )
    )

    # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
    # packaging canonicalize for us.
    # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
    url_reqs = set()  # E.g., 'foobar@ git+https://github.com/foo/bar.git@branch'
    name_reqs = set()  # E.g., foobar>=1.2.3
    name_req_projects = set()

    for req_str in request.requirements.req_strings:
        req = PipRequirement.parse(req_str)
        if req.url:
            url_reqs.add(req)
        else:
            name_reqs.add(req)
            name_req_projects.add(canonicalize_project_name(req.project_name))

    constraint_file_projects = {
        canonicalize_project_name(req.project_name) for req in constraints_file_reqs
    }
    # Constraints files must only contain name reqs, not URL reqs (those are already
    # constrained by their very nature). See https://github.com/pypa/pip/issues/8210.
    unconstrained_projects = name_req_projects - constraint_file_projects
    if unconstrained_projects:
        logger.warning(
            f"The constraints file {constraints_path} does not contain "
            # Sort for a deterministic message: set iteration order varies run to run.
            f"entries for the following requirements: {', '.join(sorted(unconstrained_projects))}.\n\n"
            f"Ignoring `[python_setup].resolve_all_constraints` option."
        )
        return _ConstraintsRepositoryPex(None)

    # To get a full set of requirements we must add the URL requirements to the
    # constraints file, since the latter cannot contain URL requirements.
    # NB: We can only add the URL requirements we know about here, i.e., those that
    #  are transitive deps of the targets in play. There may be others in the repo.
    #  So we may end up creating a few different repository pexes, each with identical
    #  name requirements but different subsets of URL requirements. Fortunately since
    #  all these repository pexes will have identical pinned versions of everything,
    #  this is not a correctness issue, only a performance one.
    all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}
    repository_pex = await Get(
        Pex,
        PexRequest(
            description=f"Resolving {constraints_path}",
            output_filename="repository.pex",
            internal_only=request.internal_only,
            requirements=PexRequirements(
                all_constraints,
                apply_constraints=True,
                # TODO: See PexRequirements docs.
                is_all_constraints_resolve=True,
            ),
            interpreter_constraints=request.interpreter_constraints,
            platforms=request.platforms,
            additional_args=request.additional_lockfile_args,
        ),
    )
    return _ConstraintsRepositoryPex(repository_pex)
Esempio n. 19
0
async def generate_from_python_requirement(
    request: GenerateFromPoetryRequirementsRequest,
    build_root: BuildRoot,
    union_membership: UnionMembership,
) -> GeneratedTargets:
    """Generate one `python_requirement` target per dependency in a Poetry pyproject.toml.

    A helper target owning the pyproject.toml file is generated as well, and
    every generated target depends on it so the file is materialized.

    Raises:
        InvalidFieldException: if `overrides` contains a key that matches no
            parsed requirement.
    """
    generator = request.generator
    pyproject_rel_path = generator[PoetryRequirementsSourceField].value
    pyproject_full_path = generator[PoetryRequirementsSourceField].file_path
    # Canonicalize keys (PEP 503) so spelling variants of a project name hit
    # the same override entry.
    overrides = {
        canonicalize_project_name(k): v
        for k, v in request.require_unparametrized_overrides().items()
    }

    file_tgt = TargetGeneratorSourcesHelperTarget(
        # NB: the sources field takes a *list* of file names, as in the
        # requirements.txt and Pipfile generators.
        {TargetGeneratorSourcesHelperSourcesField.alias: [pyproject_rel_path]},
        Address(
            request.template_address.spec_path,
            target_name=request.template_address.target_name,
            relative_file_path=pyproject_rel_path,
        ),
        union_membership,
    )

    digest_contents = await Get(
        DigestContents,
        PathGlobs(
            [pyproject_full_path],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=f"{generator}'s field `{PoetryRequirementsSourceField.alias}`",
        ),
    )

    requirements = parse_pyproject_toml(
        PyProjectToml(
            build_root=PurePath(build_root.path),
            toml_relpath=PurePath(pyproject_full_path),
            toml_contents=digest_contents[0].content.decode(),
        )
    )

    module_mapping = generator[ModuleMappingField].value
    stubs_mapping = generator[TypeStubsModuleMappingField].value

    def generate_tgt(parsed_req: PipRequirement) -> PythonRequirementTarget:
        """Create the generated target for one requirement, applying any override."""
        normalized_proj_name = canonicalize_project_name(parsed_req.project_name)
        tgt_overrides = overrides.pop(normalized_proj_name, {})
        if Dependencies.alias in tgt_overrides:
            tgt_overrides[Dependencies.alias] = list(tgt_overrides[Dependencies.alias]) + [
                file_tgt.address.spec
            ]

        return PythonRequirementTarget(
            {
                **request.template,
                PythonRequirementsField.alias: [parsed_req],
                PythonRequirementModulesField.alias: module_mapping.get(normalized_proj_name),
                PythonRequirementTypeStubModulesField.alias: stubs_mapping.get(
                    normalized_proj_name
                ),
                # This may get overridden by `tgt_overrides`, which will have already added in
                # the file tgt.
                Dependencies.alias: [file_tgt.address.spec],
                **tgt_overrides,
            },
            request.template_address.create_generated(parsed_req.project_name),
            union_membership,
        )

    result = tuple(generate_tgt(requirement) for requirement in requirements) + (file_tgt,)

    # Any override key not consumed by `overrides.pop` above matched nothing.
    if overrides:
        raise InvalidFieldException(
            softwrap(
                f"""
                Unused key in the `overrides` field for {request.template_address}:
                {sorted(overrides)}
                """
            )
        )

    return GeneratedTargets(generator, result)
Esempio n. 20
0
async def map_third_party_modules_to_addresses(
    all_python_tgts: AllPythonTargets,
    python_setup: PythonSetup,
) -> ThirdPartyPythonModuleMapping:
    """Map, per resolve, each third-party module to the targets that provide it.

    A target's explicit `modules`/`type_stub_modules` fields win outright;
    otherwise providers are inferred from the default module mappings, with a
    heuristic to recognize type-stub distributions (`types-*`, `*-stubs`, etc.).
    """
    resolves_to_modules_to_providers: DefaultDict[
        ResolveName, DefaultDict[str, list[ModuleProvider]]
    ] = defaultdict(lambda: defaultdict(list))

    for tgt in all_python_tgts.third_party:
        resolve = tgt[PythonRequirementResolveField].normalized_value(python_setup)

        def record(module_names: Iterable[str], *, type_stub: bool = False) -> None:
            # Register `tgt` as a provider of each module within its resolve.
            provider_kind = (
                ModuleProviderType.TYPE_STUB if type_stub else ModuleProviderType.IMPL
            )
            for module_name in module_names:
                resolves_to_modules_to_providers[resolve][module_name].append(
                    ModuleProvider(tgt.address, provider_kind)
                )

        explicit_impl_modules = tgt.get(PythonRequirementModulesField).value
        if explicit_impl_modules:
            record(explicit_impl_modules)
            continue

        explicit_stub_modules = tgt.get(PythonRequirementTypeStubModulesField).value
        if explicit_stub_modules:
            record(explicit_stub_modules, type_stub=True)
            continue

        # No explicit modules: infer from the requirement names.
        for req in tgt[PythonRequirementsField].value:
            proj_name = canonicalize_project_name(req.project_name)
            # NB: the fallback is not canonicalized because `.` must be
            # preserved in module names. See
            # https://www.python.org/dev/peps/pep-0503/#normalized-names.
            fallback = req.project_name.strip().lower().replace("-", "_")

            known_stub = proj_name in DEFAULT_TYPE_STUB_MODULE_MAPPING
            has_stub_prefix = fallback.startswith(("types_", "stubs_"))
            has_stub_suffix = fallback.endswith(("_types", "_stubs"))
            looks_like_stub = known_stub or has_stub_prefix or has_stub_suffix

            if looks_like_stub and proj_name not in DEFAULT_MODULE_MAPPING:
                if known_stub:
                    stub_modules = DEFAULT_TYPE_STUB_MODULE_MAPPING[proj_name]
                elif has_stub_prefix:
                    stub_modules = (fallback[6:],)  # strip "types_"/"stubs_"
                else:
                    stub_modules = (fallback[:-6],)  # strip "_types"/"_stubs"
                record(stub_modules, type_stub=True)
            else:
                record(DEFAULT_MODULE_MAPPING.get(proj_name, (fallback,)))

    return ThirdPartyPythonModuleMapping(
        (
            resolve_name,
            FrozenDict(
                (module_name, tuple(sorted(provider_list)))
                for module_name, provider_list in sorted(module_map.items())
            ),
        )
        for resolve_name, module_map in sorted(resolves_to_modules_to_providers.items())
    )
Esempio n. 21
0
async def map_third_party_modules_to_addresses() -> ThirdPartyPythonModuleMapping:
    """Map each third-party module to the `python_requirement` target providing it.

    Type-stub requirements (e.g. `types-requests`) are tracked separately from
    implementation requirements. Any module claimed by more than one target of
    the same kind is treated as ambiguous and excluded from the merged mapping.
    """
    all_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    modules_to_addresses: dict[str, Address] = {}
    modules_to_stub_addresses: dict[str, Address] = {}
    modules_with_multiple_owners: DefaultDict[str,
                                              set[Address]] = defaultdict(set)

    def claim_modules(modules: tuple[str, ...], owners: dict[str, Address],
                      address: Address) -> None:
        # Record `address` as the owner of each module in `owners`; on a
        # collision, move the module into `modules_with_multiple_owners`.
        for module in modules:
            if module in modules_with_multiple_owners:
                modules_with_multiple_owners[module].add(address)
            elif module in owners:
                modules_with_multiple_owners[module].update(
                    {owners[module], address})
            else:
                owners[module] = address

    for tgt in all_targets:
        if not tgt.has_field(PythonRequirementsField):
            continue
        # Per-target mappings overlay the global defaults.
        module_map = {
            **DEFAULT_MODULE_MAPPING,
            **tgt.get(ModuleMappingField).value
        }
        stubs_module_map = {
            **DEFAULT_TYPE_STUB_MODULE_MAPPING,
            **tgt.get(TypeStubsModuleMappingField).value,
        }
        for req in tgt[PythonRequirementsField].value:
            # NB: We don't use `canonicalize_project_name()` for the fallback value because we
            # want to preserve `.` in the module name. See
            # https://www.python.org/dev/peps/pep-0503/#normalized-names.
            proj_name = canonicalize_project_name(req.project_name)
            fallback_value = req.project_name.strip().lower().replace("-", "_")

            # Handle if it's a type stub.
            in_stubs_map = proj_name in stubs_module_map
            starts_with_prefix = fallback_value.startswith(
                ("types_", "stubs_"))
            ends_with_prefix = fallback_value.endswith(("_types", "_stubs"))
            if proj_name not in module_map and (in_stubs_map
                                                or starts_with_prefix
                                                or ends_with_prefix):
                if in_stubs_map:
                    modules = stubs_module_map[proj_name]
                else:
                    # Strip the 6-char "types_"/"stubs_" prefix or
                    # "_types"/"_stubs" suffix to recover the module name.
                    modules = (fallback_value[6:] if starts_with_prefix else
                               fallback_value[:-6], )
                claim_modules(modules, modules_to_stub_addresses, tgt.address)

            # Else it's a normal requirement.
            else:
                claim_modules(module_map.get(proj_name, (fallback_value, )),
                              modules_to_addresses, tgt.address)

    # Remove modules with ambiguous owners from the unambiguous mappings.
    for module in modules_with_multiple_owners:
        modules_to_addresses.pop(module, None)
        modules_to_stub_addresses.pop(module, None)

    # A module may legitimately have one implementation owner *and* one stub
    # owner; merge the two mappings.
    merged_mapping: DefaultDict[str, list[Address]] = defaultdict(list)
    for k, v in modules_to_addresses.items():
        merged_mapping[k].append(v)
    for k, v in modules_to_stub_addresses.items():
        merged_mapping[k].append(v)

    return ThirdPartyPythonModuleMapping(
        mapping=FrozenDict(
            (k, tuple(sorted(v))) for k, v in sorted(merged_mapping.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v)))
            for k, v in sorted(modules_with_multiple_owners.items())),
    )
Esempio n. 22
0
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    """Assemble a `PexRequest` covering the requirements, sources, and
    interpreter constraints of the requested targets.

    When a constraints file is configured and `resolve_all_constraints`
    applies, a repository PEX resolving the whole constraints file is built
    and attached so per-target resolves can subset it.
    """
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies)))
            for tgt in targets)
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses))
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(StrippedPythonSourceFiles,
                                     PythonSourceFilesRequest(all_targets))
        input_digests.append(
            prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup)
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints)

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (tgt[PythonRequirementsField]
         for tgt in all_targets if tgt.has_field(PythonRequirementsField)),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest,
                                              constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            ))

        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name)
            for req in exact_reqs
        }
        constraint_file_projects = {
            canonicalize_project_name(req.project_name)
            for req in constraints_file_reqs
        }
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints else
                f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            # Sort for a deterministic message: set iteration order varies.
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(sorted(unconstrained_projects))}")

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
                python_setup.resolve_all_constraints
                == ResolveAllConstraintsOption.NONDEPLOYABLES
                and request.internal_only):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements.")
            else:
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=
                        f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(
                            str(req) for req in constraints_file_reqs),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                    ),
                )
    elif (python_setup.resolve_all_constraints !=
          ResolveAllConstraintsOption.NEVER
          and python_setup.resolve_all_constraints_was_set_explicitly()):
        raise ValueError(
            "[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            "either [python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )
Esempio n. 23
0
async def _setup_constraints_repository_pex(
    constraints_request: _ConstraintsRepositoryPexRequest,
    python_setup: PythonSetup,
    global_requirement_constraints: GlobalRequirementConstraints,
) -> OptionalPexRequest:
    """Build the PexRequest for a repository PEX resolving the whole constraints file.

    Returns a wrapper around `None` when `resolve_all_constraints` is disabled
    or when the constraints file does not cover every name requirement in play.
    """
    request = constraints_request.repository_pex_request
    if not python_setup.resolve_all_constraints:
        return OptionalPexRequest(None)

    constraints_path = python_setup.requirement_constraints
    assert constraints_path is not None

    transitive_targets = await Get(TransitiveTargets,
                                   TransitiveTargetsRequest(request.addresses))

    requirements = PexRequirements.create_from_requirement_fields(
        (tgt[PythonRequirementsField] for tgt in transitive_targets.closure
         if tgt.has_field(PythonRequirementsField)),
        constraints_strings=(str(constraint)
                             for constraint in global_requirement_constraints),
    )

    # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
    # packaging canonicalize for us.
    # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
    url_reqs = set(
    )  # E.g., 'foobar@ git+https://github.com/foo/bar.git@branch'
    name_reqs = set()  # E.g., foobar>=1.2.3
    name_req_projects = set()
    constraints_file_reqs = set(global_requirement_constraints)

    for req_str in requirements.req_strings:
        req = PipRequirement.parse(req_str)
        if req.url:
            url_reqs.add(req)
        else:
            name_reqs.add(req)
            name_req_projects.add(canonicalize_project_name(req.project_name))

    constraint_file_projects = {
        canonicalize_project_name(req.project_name)
        for req in constraints_file_reqs
    }
    # Constraints files must only contain name reqs, not URL reqs (those are already
    # constrained by their very nature). See https://github.com/pypa/pip/issues/8210.
    unconstrained_projects = name_req_projects - constraint_file_projects
    if unconstrained_projects:
        # Sort for a deterministic message: set iteration order varies.
        logger.warning(
            softwrap(f"""
                The constraints file {constraints_path} does not contain
                entries for the following requirements: {', '.join(sorted(unconstrained_projects))}.

                Ignoring `[python_setup].resolve_all_constraints` option.
                """))
        return OptionalPexRequest(None)

    interpreter_constraints = await Get(
        InterpreterConstraints,
        InterpreterConstraintsRequest,
        request.to_interpreter_constraints_request(),
    )

    # To get a full set of requirements we must add the URL requirements to the
    # constraints file, since the latter cannot contain URL requirements.
    # NB: We can only add the URL requirements we know about here, i.e., those that
    #  are transitive deps of the targets in play. There may be others in the repo.
    #  So we may end up creating a few different repository pexes, each with identical
    #  name requirements but different subsets of URL requirements. Fortunately since
    #  all these repository pexes will have identical pinned versions of everything,
    #  this is not a correctness issue, only a performance one.
    all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}
    repository_pex = PexRequest(
        description=f"Resolving {constraints_path}",
        output_filename="repository.pex",
        internal_only=request.internal_only,
        requirements=PexRequirements(
            all_constraints,
            constraints_strings=(
                str(constraint)
                for constraint in global_requirement_constraints),
        ),
        # Monolithic PEXes like the repository PEX should always use the Packed layout.
        layout=PexLayout.PACKED,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        complete_platforms=request.complete_platforms,
        additional_args=request.additional_lockfile_args,
    )
    return OptionalPexRequest(repository_pex)
Esempio n. 24
0
async def generate_from_pipenv_requirement(
        request: GenerateFromPipenvRequirementsRequest,
        python_setup: PythonSetup) -> GeneratedTargets:
    """Generate one `python_requirement` target per entry in a Pipfile.lock.

    Entries from both the `default` and `develop` sections are combined (with
    `develop` winning on duplicates). A helper target owning the lockfile is
    generated as well, and every generated target depends on it.

    Raises:
        InvalidFieldException: if `overrides` contains a key that matches no
            entry in the lockfile.
    """
    generator = request.generator
    lock_rel_path = generator[PipenvSourceField].value
    lock_full_path = generator[PipenvSourceField].file_path
    # Canonicalize keys (PEP 503) so spelling variants of a project name hit
    # the same override entry.
    overrides = {
        canonicalize_project_name(k): v
        for k, v in request.require_unparametrized_overrides().items()
    }

    file_tgt = TargetGeneratorSourcesHelperTarget(
        {TargetGeneratorSourcesHelperSourcesField.alias: [lock_rel_path]},
        Address(
            generator.address.spec_path,
            target_name=generator.address.target_name,
            relative_file_path=lock_rel_path,
        ),
    )

    digest_contents = await Get(
        DigestContents,
        PathGlobs(
            [lock_full_path],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=
            f"{generator}'s field `{PipenvSourceField.alias}`",
        ),
    )
    # Pipfile.lock is JSON; entries live under the "default"/"develop" keys.
    lock_info = json.loads(digest_contents[0].content)

    # Validate the resolve is legal.
    generator[PythonRequirementResolveField].normalized_value(python_setup)

    module_mapping = generator[ModuleMappingField].value
    stubs_mapping = generator[TypeStubsModuleMappingField].value
    # Common fields (e.g. tags) and the resolve are inherited by every
    # generated target.
    inherited_fields = {
        field.alias: field.value
        for field in request.generator.field_values.values()
        if isinstance(field, (*COMMON_TARGET_FIELDS,
                              PythonRequirementResolveField))
    }

    def generate_tgt(raw_req: str, info: dict) -> PythonRequirementTarget:
        # Rebuild a pip requirement string "name[extras]version;markers" from
        # the lockfile entry, then create the generated target for it.
        if info.get("extras"):
            raw_req += f"[{','.join(info['extras'])}]"
        raw_req += info.get("version", "")
        if info.get("markers"):
            raw_req += f";{info['markers']}"

        parsed_req = PipRequirement.parse(raw_req)
        normalized_proj_name = canonicalize_project_name(
            parsed_req.project_name)
        tgt_overrides = overrides.pop(normalized_proj_name, {})
        if Dependencies.alias in tgt_overrides:
            tgt_overrides[Dependencies.alias] = list(
                tgt_overrides[Dependencies.alias]) + [file_tgt.address.spec]

        return PythonRequirementTarget(
            {
                **inherited_fields,
                PythonRequirementsField.alias: [parsed_req],
                PythonRequirementModulesField.alias:
                module_mapping.get(normalized_proj_name),
                PythonRequirementTypeStubModulesField.alias:
                stubs_mapping.get(normalized_proj_name),
                # This may get overridden by `tgt_overrides`, which will have already added in
                # the file tgt.
                Dependencies.alias: [file_tgt.address.spec],
                **tgt_overrides,
            },
            generator.address.create_generated(parsed_req.project_name),
        )

    result = tuple(
        generate_tgt(req, info) for req, info in {
            **lock_info.get("default", {}),
            **lock_info.get("develop", {})
        }.items()) + (file_tgt, )

    # Any override key not consumed by `overrides.pop` above matched nothing.
    if overrides:
        raise InvalidFieldException(
            f"Unused key in the `overrides` field for {request.generator.address}: "
            f"{sorted(overrides)}")

    return GeneratedTargets(generator, result)
Esempio n. 25
0
async def map_third_party_modules_to_addresses(
    all_python_tgts: AllPythonTargets, ) -> ThirdPartyPythonModuleMapping:
    """Map every third-party module to the requirement target that provides it.

    Implementation modules and type-stub modules are tracked separately; a
    module claimed by two distinct owners within the same registry is moved to
    the ambiguous set so dependency inference won't silently pick one.
    """
    providers: dict[str, Address] = {}
    stub_providers: dict[str, Address] = {}
    ambiguous: DefaultDict[str, set[Address]] = defaultdict(set)

    def record(registry: dict[str, Address], modules: tuple[str, ...],
               address: Address) -> None:
        # A module already flagged ambiguous just collects the extra owner; a
        # second distinct owner of a registered module makes it ambiguous.
        for module in modules:
            if module in ambiguous:
                ambiguous[module].add(address)
            elif module in registry:
                ambiguous[module].update({registry[module], address})
            else:
                registry[module] = address

    for tgt in all_python_tgts.third_party:
        explicit_modules = tgt.get(PythonRequirementModulesField).value
        if explicit_modules:
            record(providers, explicit_modules, tgt.address)
            continue

        explicit_stub_modules = tgt.get(
            PythonRequirementTypeStubModulesField).value
        if explicit_stub_modules:
            record(stub_providers, explicit_stub_modules, tgt.address)
            continue

        # Neither field was set explicitly: fall back to defaults/heuristics.
        for req in tgt[PythonRequirementsField].value:
            proj_name = canonicalize_project_name(req.project_name)
            # NB: We don't use `canonicalize_project_name()` for the fallback
            # value because we want to preserve `.` in the module name. See
            # https://www.python.org/dev/peps/pep-0503/#normalized-names.
            fallback = req.project_name.strip().lower().replace("-", "_")

            in_stubs_map = proj_name in DEFAULT_TYPE_STUB_MODULE_MAPPING
            has_stub_prefix = fallback.startswith(("types_", "stubs_"))
            has_stub_suffix = fallback.endswith(("_types", "_stubs"))
            looks_like_stubs = (in_stubs_map or has_stub_prefix
                                or has_stub_suffix)
            if looks_like_stubs and proj_name not in DEFAULT_MODULE_MAPPING:
                if in_stubs_map:
                    stub_modules = DEFAULT_TYPE_STUB_MODULE_MAPPING[proj_name]
                elif has_stub_prefix:
                    # Strip the 6-char `types_`/`stubs_` prefix.
                    stub_modules = (fallback[6:], )
                else:
                    # Strip the 6-char `_types`/`_stubs` suffix.
                    stub_modules = (fallback[:-6], )
                record(stub_providers, stub_modules, tgt.address)
            else:
                record(providers,
                       DEFAULT_MODULE_MAPPING.get(proj_name, (fallback, )),
                       tgt.address)

    # Drop modules with ambiguous owners from the unambiguous registries.
    for module in ambiguous:
        providers.pop(module, None)
        stub_providers.pop(module, None)

    # A module may legitimately have both an implementation owner and a stub
    # owner; merge them into one list per module (implementation first).
    merged: DefaultDict[str, list[Address]] = defaultdict(list)
    for registry in (providers, stub_providers):
        for module, address in registry.items():
            merged[module].append(address)

    return ThirdPartyPythonModuleMapping(
        mapping=FrozenDict(
            (module, tuple(sorted(addresses)))
            for module, addresses in sorted(merged.items())),
        ambiguous_modules=FrozenDict(
            (module, tuple(sorted(addresses)))
            for module, addresses in sorted(ambiguous.items())),
    )