def add_requirements_from_recipe_or_output(yaml_data):
    for s in get_in(["requirements", "host"], yaml_data, []):
        add_spec(s, "main")
    for s in get_in(["requirements", "run"], yaml_data, []):
        add_spec(s, "main")
    for s in get_in(["test", "requires"], yaml_data, []):
        add_spec(s, "dev")
Example #2
def parse_poetry_pyproject_toml(
    pyproject_toml: pathlib.Path, platform: str, include_dev_dependencies: bool
) -> LockSpecification:
    contents = toml.load(pyproject_toml)
    specs: List[str] = []
    dependency_sections = ["dependencies"]
    if include_dev_dependencies:
        dependency_sections.append("dev-dependencies")

    for key in dependency_sections:
        deps = get_in(["tool", "poetry", key], contents, {})
        for depname, depattrs in deps.items():
            conda_dep_name = normalize_pypi_name(depname)
            if isinstance(depattrs, collections.abc.Mapping):
                poetry_version_spec = depattrs["version"]
                # TODO: support additional features such as markers for things like sys_platform, platform_system
            elif isinstance(depattrs, str):
                poetry_version_spec = depattrs
            else:
                raise TypeError(
                    f"Unsupported type for dependency: {depname}: {depattrs:r}"
                )
            conda_version = poetry_version_to_conda_version(poetry_version_spec)
            spec = to_match_spec(conda_dep_name, conda_version)

            if conda_dep_name == "python":
                specs.insert(0, spec)
            else:
                specs.append(spec)

    channels = get_in(["tool", "conda-lock", "channels"], contents, [])

    return LockSpecification(specs=specs, channels=channels, platform=platform)
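
The poetry_version_to_conda_version call above translates poetry's caret/tilde specifiers into plain version ranges. A simplified illustration of the caret rule (bump the left-most non-zero component for the upper bound); this is not conda-lock's actual implementation:

def caret_to_range(spec: str) -> str:
    # Illustrative only: "^1.2.3" -> ">=1.2.3,<2.0.0", "^0.2.3" -> ">=0.2.3,<0.3.0".
    # Pre-release tags and tilde/wildcard specifiers are not handled here.
    parts = [int(p) for p in spec.lstrip("^").split(".")]
    lower = ".".join(str(p) for p in parts)
    for i, p in enumerate(parts):
        if p != 0 or i == len(parts) - 1:
            upper = parts[:i] + [p + 1] + [0] * (len(parts) - i - 1)
            break
    return f">={lower},<{'.'.join(str(u) for u in upper)}"

print(caret_to_range("^1.2.3"))   # >=1.2.3,<2.0.0
print(caret_to_range("^0.2.3"))   # >=0.2.3,<0.3.0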
Example #3
def parse_requirements_pyproject_toml(
    pyproject_toml_path: pathlib.Path,
    contents: Mapping[str, Any],
    prefix: Sequence[str],
    main_tag: str,
    optional_tag: str,
    dev_tags: AbstractSet[str] = {"dev", "test"},
):
    """
    Parse dependencies from a PEP 621 ([project] table) or flit-style pyproject.toml.
    """
    dependencies: List[Dependency] = []

    sections = {(*prefix, main_tag): "main"}
    for extra in dev_tags:
        sections[(*prefix, optional_tag, extra)] = "dev"
    for extra in set(get_in([*prefix, optional_tag], contents,
                            {}).keys()).difference(dev_tags):
        sections[(*prefix, optional_tag, extra)] = extra

    for path, category in sections.items():
        for dep in get_in(list(path), contents, []):
            dependencies.append(
                parse_python_requirement(dep,
                                         manager="conda",
                                         category=category,
                                         optional=category != "main"))

    return specification_with_dependencies(pyproject_toml_path, contents,
                                           dependencies)
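
To make the section bookkeeping concrete, here is a hypothetical, already-parsed PEP 621 contents dict and the sections mapping the loop above would build from it, with prefix=("project",), main_tag="dependencies", optional_tag="optional-dependencies":

contents = {
    "project": {
        "dependencies": ["requests >=2.26"],
        "optional-dependencies": {"test": ["pytest"], "docs": ["sphinx"]},
    }
}
prefix, main_tag, optional_tag = ("project",), "dependencies", "optional-dependencies"
dev_tags = {"dev", "test"}

sections = {(*prefix, main_tag): "main"}
for extra in dev_tags:
    sections[(*prefix, optional_tag, extra)] = "dev"
for extra in set(contents["project"][optional_tag]) - dev_tags:
    sections[(*prefix, optional_tag, extra)] = extra

print(sections)
# (order may vary)
# {('project', 'dependencies'): 'main',
#  ('project', 'optional-dependencies', 'dev'): 'dev',
#  ('project', 'optional-dependencies', 'test'): 'dev',
#  ('project', 'optional-dependencies', 'docs'): 'docs'}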
Example #4
def parse_pyproject_toml(pyproject_toml: pathlib.Path) -> LockSpecification:
    contents = toml.load(pyproject_toml)
    build_system = get_in(["build-system", "build-backend"], contents)
    pep_621_probe = get_in(["project", "dependencies"], contents)
    parse = parse_poetry_pyproject_toml
    if pep_621_probe is not None:
        parse = partial(
            parse_requirements_pyproject_toml,
            prefix=("project", ),
            main_tag="dependencies",
            optional_tag="optional-dependencies",
        )
    elif build_system.startswith("poetry"):
        parse = parse_poetry_pyproject_toml
    elif build_system.startswith("flit"):
        parse = partial(
            parse_requirements_pyproject_toml,
            prefix=("tool", "flit", "metadata"),
            main_tag="requires",
            optional_tag="requires-extra",
        )
    else:
        import warnings

        warnings.warn(
            "Could not detect build-system in pyproject.toml.  Assuming poetry"
        )

    return parse(pyproject_toml, contents)
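
The functools.partial calls pre-bind the layout-specific keyword arguments so that every branch yields a callable with the same two-argument shape. A minimal sketch of the pattern (the function below is a stand-in with a made-up body, not conda-lock's API):

from functools import partial

def parse_requirements(path, contents, *, prefix, main_tag, optional_tag):
    # Stand-in for parse_requirements_pyproject_toml: only the signature matters here.
    return (prefix, main_tag, optional_tag)

parse = partial(
    parse_requirements,
    prefix=("project",),
    main_tag="dependencies",
    optional_tag="optional-dependencies",
)
# The partial is now called exactly like parse_poetry_pyproject_toml(path, contents).
print(parse("pyproject.toml", {}))  # (('project',), 'dependencies', 'optional-dependencies')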
Example #5
def specification_with_dependencies(
        path: pathlib.Path, toml_contents: Mapping[str, Any],
        dependencies: List[Dependency]) -> LockSpecification:
    for depname, depattrs in get_in(["tool", "conda-lock", "dependencies"],
                                    toml_contents, {}).items():
        if isinstance(depattrs, str):
            conda_version = depattrs
        else:
            raise TypeError(
                f"Unsupported type for dependency: {depname}: {depattrs:r}")
        dependencies.append(
            VersionedDependency(
                name=depname,
                version=conda_version,
                manager="conda",
                optional=False,
                category="main",
                extras=[],
            ))

    return LockSpecification(
        dependencies,
        channels=get_in(["tool", "conda-lock", "channels"], toml_contents, []),
        platforms=get_in(["tool", "conda-lock", "platforms"], toml_contents,
                         []),
        sources=[path],
    )
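
For reference, a hypothetical pyproject.toml fragment with the [tool.conda-lock] tables this function reads (channels, platforms, and extra conda-only dependencies):

import toml

doc = toml.loads("""
[tool.conda-lock]
channels = ["conda-forge"]
platforms = ["linux-64", "osx-64"]

[tool.conda-lock.dependencies]
libffi = ">=3.3"
""")

print(doc["tool"]["conda-lock"]["dependencies"])  # {'libffi': '>=3.3'}
print(doc["tool"]["conda-lock"]["channels"])      # ['conda-forge']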
Example #6
def parse_flit_pyproject_toml(pyproject_toml: pathlib.Path, platform: str,
                              include_dev_dependencies: bool):
    contents = toml.load(pyproject_toml)

    requirements = get_in(["tool", "flit", "metadata", "requires"], contents,
                          [])
    if include_dev_dependencies:
        requirements += get_in(
            ["tool", "flit", "metadata", "requires-extra", "test"], contents,
            [])
        requirements += get_in(
            ["tool", "flit", "metadata", "requires-extra", "dev"], contents,
            [])

    dependency_sections = ["tool"]
    if include_dev_dependencies:
        dependency_sections += ["dev-dependencies"]

    specs = [python_requirement_to_conda_spec(req) for req in requirements]

    conda_deps = get_in(["tool", "conda-lock", "dependencies"], contents, {})
    specs.extend(parse_conda_dependencies(conda_deps))

    channels = get_in(["tool", "conda-lock", "channels"], contents, [])

    return LockSpecification(specs=specs, channels=channels, platform=platform)
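
A hypothetical flit-style pyproject.toml fragment showing the [tool.flit.metadata] keys this parser walks (requires plus the test/dev entries of requires-extra):

import toml

doc = toml.loads("""
[tool.flit.metadata]
module = "mypkg"
requires = ["requests >=2.26"]

[tool.flit.metadata.requires-extra]
test = ["pytest"]
dev = ["black"]
""")

meta = doc["tool"]["flit"]["metadata"]
print(meta["requires"])                # ['requests >=2.26']
print(meta["requires-extra"]["test"])  # ['pytest']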
Example #7
def add_requirements_from_recipe_or_output(yaml_data):
    for s in get_in(["requirements", "host"], yaml_data, []):
        add_spec(s)
    for s in get_in(["requirements", "run"], yaml_data, []):
        add_spec(s)
    if include_dev_dependencies:
        for s in get_in(["test", "requires"], yaml_data, []):
            add_spec(s)
Example #8
def _parse_meta_yaml_file_for_platform(
    meta_yaml_file: pathlib.Path,
    platform: str,
) -> LockSpecification:
    """Parse a simple meta-yaml file for dependencies, assuming the target platform.

    * This does not support multi-output files and will ignore all lines with selectors other than platform
    """
    if not meta_yaml_file.exists():
        raise FileNotFoundError(f"{meta_yaml_file} not found")

    with meta_yaml_file.open("r") as fo:
        filtered_recipe = "\n".join(
            filter_platform_selectors(fo.read(), platform=platform)
        )
        t = jinja2.Template(filtered_recipe, undefined=UndefinedNeverFail)
        rendered = t.render()

        meta_yaml_data = yaml.safe_load(rendered)

    channels = get_in(["extra", "channels"], meta_yaml_data, [])
    dependencies: List[Dependency] = []

    def add_spec(spec: str, category: str):
        if spec is None:
            return
        # TODO: This does not parse conda requirements with build strings
        dep = parse_python_requirement(
            spec,
            manager="conda",
            optional=category != "main",
            category=category,
            normalize_name=False,
        )
        dep.selectors.platform = [platform]
        dependencies.append(dep)

    def add_requirements_from_recipe_or_output(yaml_data):
        for s in get_in(["requirements", "host"], yaml_data, []):
            add_spec(s, "main")
        for s in get_in(["requirements", "run"], yaml_data, []):
            add_spec(s, "main")
        for s in get_in(["test", "requires"], yaml_data, []):
            add_spec(s, "dev")

    add_requirements_from_recipe_or_output(meta_yaml_data)
    for output in get_in(["outputs"], meta_yaml_data, []):
        add_requirements_from_recipe_or_output(output)

    return LockSpecification(
        dependencies=dependencies,
        channels=channels,
        platforms=[platform],
        sources=[meta_yaml_file],
    )
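
A hypothetical, already-rendered meta.yaml showing the paths the nested add_requirements_from_recipe_or_output walks and the category each one receives:

import yaml

rendered = """
requirements:
  host:
    - python >=3.7
  run:
    - numpy
test:
  requires:
    - pytest
extra:
  channels:
    - conda-forge
"""
data = yaml.safe_load(rendered)
print(data["requirements"]["run"])   # ['numpy']   -> category "main"
print(data["test"]["requires"])      # ['pytest']  -> category "dev"
print(data["extra"]["channels"])     # ['conda-forge']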
Example #9
def parse_meta_yaml_file(meta_yaml_file: pathlib.Path, platform: str,
                         include_dev_dependencies: bool) -> LockSpecification:
    """Parse a simple meta-yaml file for dependencies.

    * This does not support multi-output files and will ignore all lines with selectors
    """
    if not meta_yaml_file.exists():
        raise FileNotFoundError(f"{meta_yaml_file} not found")

    with meta_yaml_file.open("r") as fo:
        filtered_recipe = "\n".join(
            filter_platform_selectors(fo.read(), platform=platform))
        t = jinja2.Template(filtered_recipe, undefined=UndefinedNeverFail)
        rendered = t.render()

        meta_yaml_data = yaml.safe_load(rendered)

    channels = get_in(["extra", "channels"], meta_yaml_data, [])
    specs = []

    def add_spec(spec):
        if spec is None:
            return
        specs.append(spec)

    def add_requirements_from_recipe_or_output(yaml_data):
        for s in get_in(["requirements", "host"], yaml_data, []):
            add_spec(s)
        for s in get_in(["requirements", "run"], yaml_data, []):
            add_spec(s)
        if include_dev_dependencies:
            for s in get_in(["test", "requires"], yaml_data, []):
                add_spec(s)

    add_requirements_from_recipe_or_output(meta_yaml_data)
    for output in get_in(["outputs"], meta_yaml_data, []):
        add_requirements_from_recipe_or_output(output)

    return LockSpecification(specs=specs, channels=channels, platform=platform)
Example #10
def parse_pyproject_toml(pyproject_toml: pathlib.Path, platform: str,
                         include_dev_dependencies: bool):
    contents = toml.load(pyproject_toml)
    build_system = get_in(["build-system", "build-backend"], contents)
    parse = parse_poetry_pyproject_toml
    if build_system.startswith("poetry"):
        parse = parse_poetry_pyproject_toml
    elif build_system.startswith("flit"):
        parse = parse_flit_pyproject_toml
    else:
        import warnings

        warnings.warn(
            "Could not detect build-system in pyproject.toml.  Assuming poetry"
        )

    return parse(pyproject_toml, platform, include_dev_dependencies)
Example #11
def parse_poetry_pyproject_toml(
    path: pathlib.Path,
    contents: Mapping[str, Any],
) -> LockSpecification:
    """
    Parse dependencies from a poetry pyproject.toml file

    Each dependency is assigned a category depending on which section it appears in:
    * dependencies in [tool.poetry.dependencies] have category main
    * dependencies in [tool.poetry.dev-dependencies] have category dev
    * dependencies in each `key` of [tool.poetry.extras] have category `key`

    * By default, dependency names are translated to the conda equivalent, with two exceptions:
        - If a dependency has `source = "pypi"`, it is treated as a pip dependency (by name)
        - If a dependency has a url, it is treated as a direct pip dependency (by url)

    * markers are not supported

    """
    dependencies: List[Dependency] = []

    categories = {"dependencies": "main", "dev-dependencies": "dev"}

    dep_to_extra = {}
    for category, deps in get_in(["tool", "poetry", "extras"], contents,
                                 {}).items():
        for dep in deps:
            dep_to_extra[dep] = category

    for section, default_category in categories.items():
        for depname, depattrs in get_in(["tool", "poetry", section], contents,
                                        {}).items():
            category = dep_to_extra.get(depname) or default_category
            optional = category != "main"
            manager: Literal["conda", "pip"] = "conda"
            url = None
            extras = []
            if isinstance(depattrs, collections.abc.Mapping):
                poetry_version_spec = depattrs.get("version", None)
                url = depattrs.get("url", None)
                optional = depattrs.get("optional", False)
                extras = depattrs.get("extras", [])
                # If a dependency is explicitly marked as sourced from pypi,
                # or is a URL dependency, delegate to the pip section
                if (depattrs.get("source", None) == "pypi"
                        or poetry_version_spec is None):
                    manager = "pip"
                # TODO: support additional features such as markers for things like sys_platform, platform_system
            elif isinstance(depattrs, str):
                poetry_version_spec = depattrs
            else:
                raise TypeError(
                    f"Unsupported type for dependency: {depname}: {depattrs}")
            if manager == "conda":
                name = normalize_pypi_name(depname)
                version = poetry_version_to_conda_version(poetry_version_spec)
            else:
                name = depname
                version = poetry_version_spec
            if version is None:
                if url is None:
                    raise ValueError(
                        f"dependency {depname} has neither version nor url")
                url, hashes = urldefrag(url)
                dependencies.append(
                    URLDependency(
                        name=name,
                        url=url,
                        hashes=[hashes],
                        manager=manager,
                        optional=optional,
                        category=category,
                        extras=extras,
                    ))
            else:
                dependencies.append(
                    VersionedDependency(
                        name=name,
                        version=version,
                        manager=manager,
                        optional=optional,
                        category=category,
                        extras=extras,
                    ))

    return specification_with_dependencies(path, contents, dependencies)
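
A hypothetical [tool.poetry] snippet illustrating the category rules from the docstring: requests lands in "main", pytest in "dev", and pandas in the "dataframe" category because it is listed under [tool.poetry.extras]:

import toml

doc = toml.loads("""
[tool.poetry.dependencies]
python = "^3.8"
requests = "^2.26"
pandas = { version = "^1.3", optional = true }

[tool.poetry.dev-dependencies]
pytest = "^6.2"

[tool.poetry.extras]
dataframe = ["pandas"]
""")

poetry = doc["tool"]["poetry"]
print(poetry["extras"])                  # {'dataframe': ['pandas']}
print(poetry["dependencies"]["pandas"])  # {'version': '^1.3', 'optional': True}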