Example #1
    def _convert_deps(*, deps, name, version, extra):

        # filter result
        result = []
        for dep in deps:
            try:
                req = Requirement(dep)
            except InvalidRequirement as e:
                msg = 'cannot parse requirement: {} from {} {}'
                try:
                    # try to parse again with the environment markers dropped
                    req = Requirement(dep.split(';')[0])
                except InvalidRequirement:
                    raise ValueError(msg.format(dep, name, version)) from e
                else:
                    logger.warning('cannot parse marker', extra=dict(
                        requirement=dep,
                        source_name=name,
                        source_version=version,
                    ))

            try:
                dep_extra = req.marker and Markers(req.marker).extra
            except ValueError:  # unsupported operation for version marker python_version: in
                dep_extra = None

            # the dep has no extra marker and no extra was requested
            if dep_extra is None and extra is None:
                result.append(req)
                continue
            # the dep is guarded by an extra but none was requested,
            # or the dep has no extra while one was requested
            if dep_extra is None or extra is None:
                continue
            # the dep's extra matches the requested extra
            elif dep_extra == extra:
                result.append(req)
                continue

        return tuple(result)
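A short usage sketch of the helper above; the requirement strings, package name and version below are illustrative, and _convert_deps is assumed to be importable:

# Hypothetical inputs to illustrate the extra filtering described in the comments.
deps = [
    'requests>=2.0',              # unconditional dependency
    'pytest; extra == "tests"',   # only pulled in via the "tests" extra
]
# extra=None keeps only the unconditional requirement ('requests>=2.0').
base = _convert_deps(deps=deps, name='mypkg', version='1.0', extra=None)
# extra='tests' keeps only the requirement guarded by that extra ('pytest').
tests = _convert_deps(deps=deps, name='mypkg', version='1.0', extra='tests')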
Example #2
def parse_requirement(requirement):
    """Parse a single requirement.

    It handles:
    - requirements parseable by packaging.requirements.Requirement
    - URLs, which are turned into PEP 508 URL specifications (using the #egg
      fragment to identify the package).

    Anything not handled is returned as a string (with a warning).

    >>> parse_requirement('requests [security,tests] >= 2.8.1, == 2.8.* ; python_version < "2.7"')
    <Requirement('requests[security,tests]==2.8.*,>=2.8.1; python_version < "2.7"')>
    >>> parse_requirement('https://github.com/pypa/pip/archive/1.3.1.zip#egg=pip')
    <Requirement('pip@ https://github.com/pypa/pip/archive/1.3.1.zip#egg=pip')>
    >>> parse_requirement('git+ssh://[email protected]/ska-sa/katdal')
    <Requirement('katdal@ git+ssh://[email protected]/ska-sa/katdal')>
    >>> parse_requirement('--no-binary :all:')
    '--no-binary :all:'

    Returns
    -------
    :class:`packaging.requirements.Requirement` or str
        The parsed requirement
    """
    try:
        return Requirement(requirement)
    except ValueError:
        if '://' not in requirement:
            warnings.warn(
                'Requirement {} could not be parsed and is not a URL'.format(
                    requirement))
            return requirement
        url = urllib.parse.urlparse(requirement)
        frag_params = urllib.parse.parse_qs(url.fragment)
        if 'egg' in frag_params:
            name = frag_params['egg'][0]
        else:
            name = url.path.split('/')[-1]
        requirement = name + ' @ ' + requirement
        return Requirement(requirement)
Example #3
def test_InstallError():
    with pytest.raises(
            InstallError,
            match=
            "Could not install the given requirements: 'pytest', 'flake8', 'black' and 'pip'",
    ):
        raise InstallError("pytest", "flake8", "black", "pip")

    with pytest.raises(
            InstallError,
            match=
            "Could not install the given requirements: 'pytest', 'flake8', 'black' and 'pip'",
    ):
        raise InstallError(ComparableRequirement("pytest"),
                           ComparableRequirement("flake8"),
                           ComparableRequirement("black"),
                           ComparableRequirement("pip"))

    with pytest.raises(
            InstallError,
            match=
            "Could not install the given requirements: 'pytest', 'flake8', 'black' and 'pip'",
    ):
        raise InstallError(Requirement("pytest"), Requirement("flake8"),
                           Requirement("black"), Requirement("pip"))

    with pytest.raises(
            InstallError,
            match=
            "Could not install the given requirements: 'pytest', 'flake8', 'black' and 'pip'",
    ):
        raise InstallError(ComparableRequirement("pytest"),
                           Requirement("flake8"), "black", Requirement("pip"))
Example #4
def _transform_python_requirements(
    requirements_list: List[str],
    repo_url: str,
    out_requirements_files: List[Dict],
) -> None:
    """
    Helper function to perform data transformations on an arbitrary list of requirements.
    :param requirements_list: List[str]: List of requirements
    :param repo_url: str: The URL of the GitHub repo.
    :param out_requirements_files: Output array to append transformed results to.
    :return: Nothing.
    """
    parsed_list = []
    for line in requirements_list:
        stripped_line = line.partition('#')[0].strip()
        if stripped_line == '':
            continue
        try:
            req = Requirement(stripped_line)
        except InvalidRequirement:
            # INFO and not WARN/ERROR as we intentionally don't support all ways to specify Python requirements
            logger.info(
                f"Failed to parse line \"{line}\" in repo {repo_url}'s requirements.txt; skipping line.",
                exc_info=True,
            )
            continue
        parsed_list.append(req)

    for req in parsed_list:
        pinned_version = None
        if len(req.specifier) == 1:
            specifier = next(iter(req.specifier))
            if specifier.operator == '==':
                pinned_version = specifier.version

        # Set `spec` to a default value. Example values for str(req.specifier): "<4.0,>=3.0" or "==1.0.0".
        spec: Optional[str] = str(req.specifier)
        # Set spec to `None` instead of empty string so that the Neo4j driver will leave the library.specifier field
        # undefined. As convention, we prefer undefined values over empty strings in the graph.
        if spec == '':
            spec = None

        canon_name = canonicalize_name(req.name)
        requirement_id = f"{canon_name}|{pinned_version}" if pinned_version else canon_name

        out_requirements_files.append({
            "id": requirement_id,
            "name": canon_name,
            "specifier": spec,
            "version": pinned_version,
            "repo_url": repo_url,
        })
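An illustrative call, assuming the function above is importable (the requirement lines and repo URL are made up):

out_files = []
_transform_python_requirements(
    ['requests==2.28.1', 'flask>=2.0  # web framework', 'not a requirement!!'],
    'https://github.com/example/repo',
    out_files,
)
# out_files now contains entries such as:
# {'id': 'requests|2.28.1', 'name': 'requests', 'specifier': '==2.28.1',
#  'version': '2.28.1', 'repo_url': 'https://github.com/example/repo'}
# The unparseable third line is logged at INFO level and skipped.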
Example #5
def test_resolve_requirement_versions():
    package1 = Requirement('test-package[ext1]>=1.1.1; extra == "group1"')
    package2 = Requirement('test-package[ext2]<=2.0.0; extra == "group2"')
    assert str(resolve_requirement_versions([package1, package2])) == str(
        Requirement(
            'test-package[ext1,ext2]<=2.0.0,>=1.1.1; extra == "group1" or extra == "group2"'
        ))

    package3 = Requirement('test-package==1.1.0; extra == "group3"')
    package4 = Requirement('test-package==1.1.0; extra == "group4"')
    assert str(resolve_requirement_versions([package3, package4])) == str(
        Requirement(
            'test-package==1.1.0; extra == "group3" or extra == "group4"'))

    package5 = Requirement('test-package; extra == "group5"')
    package6 = Requirement('test-package[ext3]@https://foo.com')
    assert str(resolve_requirement_versions([package5, package6])) == str(
        Requirement('test-package[ext3]@ https://foo.com ; extra == "group5"'))
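resolve_requirement_versions itself is not shown here; the sketch below is one way such a merge could work, assuming it combines extras, specifiers, URLs and markers of requirements for the same project (it is not the project's actual implementation):

from packaging.markers import Marker
from packaging.requirements import Requirement

def resolve_requirement_versions_sketch(packages):
    # Start from a copy of the first requirement and fold the rest into it.
    merged = Requirement(str(packages[0]))
    for pkg in packages[1:]:
        merged.extras |= pkg.extras            # union of extras
        merged.specifier &= pkg.specifier      # intersection of version constraints
        merged.url = merged.url or pkg.url     # keep any direct URL reference
        if pkg.marker and merged.marker:
            # join environment markers with "or", as in the expected values above
            merged.marker = Marker("{} or {}".format(merged.marker, pkg.marker))
        elif pkg.marker:
            merged.marker = pkg.marker
    return merged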
Example #6
def run(specs, requirements_file, upgrade_versions, *args, **kwargs):
    if not specs:  # then, assume all in the requirements file
        regex = re.compile(r"(^|\n|\n\r).*==")
        specs = []
        previous_versions = {}
        with open(requirements_file) as f:
            for line in f:
                if regex.search(line) and not line.lstrip().startswith("#"):
                    req = Requirement(line.split("\\")[0])
                    # Deliberately strip the specifier (aka. the version)
                    if upgrade_versions:
                        version = req.specifier
                        req.specifier = None
                        previous_versions[str(req)] = version
                    specs.append(str(req))
        if upgrade_versions:
            kwargs["previous_versions"] = previous_versions

    if isinstance(specs, str):
        specs = [specs]

    return run_packages(specs, requirements_file, *args, **kwargs)
Example #7
 def dependencies(self):
     print(f"Looking for dependencies: {self}")
     self.get_metadata()
     if self.metadata:
         deps = self.metadata.get_all("Requires-Dist", [])
         extras = self.extras if self.extras else ['']
         print(f"{self.name} -> {deps}")
         for d in deps:
             r = Requirement(d)
             if r.marker is None:
                 yield r
             elif len(self.extras) == 0:
                 if r.marker.evaluate({'extra': ''}):
                     yield r
             else:
                 for e in self.extras:
                     if r.marker.evaluate({'extra': e}):
                         yield r
     # If we have extras, we also depend on the base package having
     # the same version
     if self.extras:
         yield Requirement(f"{self.name} == {self.version}")
Example #8
def collate_python_requirements(modules):
  # type: (List[libtbx.env_config.module]) -> List[packaging.requirements.Requirement]
  """Combine python requirements from a module list.

  An attempt will be made to merge any joint requirements. The requirement
  objects will have an added property 'modules', which is a set of module
  names that formed the requirement.

  Args:
      modules (Iterable[libtbx.env_config.module]): The module list

  Returns:
      List[packaging.requirements.Requirement]: The merged requirements
  """
  requirements = []
  for module, spec in itertools.chain(*[[(x.name, y) for y in x.python_required] for x in modules if hasattr(x, "python_required")]):
    requirement = Requirement(spec)
    # Track where dependencies came from
    requirement.modules = {module}
    # Attempt to merge this with any other requirements to avoid double-specifying
    _merge_requirements(requirements, requirement)
  return requirements
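_merge_requirements is not shown above; a hedged sketch of what the docstring implies it does (merge specifiers, extras and the added 'modules' set when the same project appears twice):

from packaging.utils import canonicalize_name

def _merge_requirements_sketch(requirements, requirement):
    # Sketch only: fold the new requirement into an existing one with the same
    # canonical name, otherwise append it to the list.
    for existing in requirements:
        if canonicalize_name(existing.name) == canonicalize_name(requirement.name):
            existing.specifier &= requirement.specifier
            existing.extras |= requirement.extras
            existing.modules |= requirement.modules
            return
    requirements.append(requirement)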
Example #9
async def test_parse_requirements_nixpkgs_with_no_source():
    data = await load_nixpkgs_data(PINNED_NIXPKGS_ARGS)
    nixpkgs = NixpkgsData(data)
    pypi = PyPIData(PyPICache())

    async def f(pkg):
        return await evaluate_package_requirements(pkg, PINNED_NIXPKGS_ARGS)

    c = VersionChooser(nixpkgs, pypi, f)
    await c.require(Requirement('pycrypto'))
    pkg = c.package_for('pycrypto')
    assert pkg is not None
    await pkg.metadata()
Example #10
def valid_pypi_name(package_spec: str) -> Optional[str]:
    try:
        package_req = Requirement(package_spec)
    except InvalidRequirement:
        # not a valid PEP 508 package specification
        return None

    if package_req.url:
        # package name supplied by user might not match package found in URL,
        #   so force package name determination the long way
        return None

    return canonicalize_name(package_req.name)
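Illustrative behaviour, assuming valid_pypi_name is importable:

assert valid_pypi_name("Requests[security]>=2.0") == "requests"  # canonicalized name
assert valid_pypi_name("pip @ https://github.com/pypa/pip/archive/1.3.1.zip") is None  # URL-based spec
assert valid_pypi_name("not a spec !!!") is None  # not a valid PEP 508 requirement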
Example #11
def _parse_requirement(app_requirement):
    try:
        return Requirement(app_requirement)
    except InvalidRequirement as error:
        from urllib.parse import urlsplit

        if urlsplit(app_requirement).scheme:
            raise click.ClickException(
                "It looks like you tried to install directly from a PEP 508 "
                "compliant URL. Make sure to use the 'app-name@url' syntax "
                "to provide a name for the app.")
        raise click.ClickException(
            f"Invalid requirement '{app_requirement}': {error!s}\n")
Example #12
async def test_packages_with_markers_in_extras_require():
    data = await load_nixpkgs_data(PINNED_NIXPKGS_ARGS)
    nixpkgs = NixpkgsData(data)
    pypi = PyPIData(PyPICache())

    async def f(pkg):
        return await evaluate_package_requirements(pkg, PINNED_NIXPKGS_ARGS)

    c = VersionChooser(nixpkgs, pypi, f)
    await c.require(Requirement('doit'))
    assert c.package_for('doit')
    assert c.package_for('pyinotify')
    assert c.package_for('macfsevents') is None
Example #13
async def test_build_sampleproject_nixpkgs():
    data = await load_nixpkgs_data(PINNED_NIXPKGS_ARGS)
    nixpkgs = NixpkgsData(data)
    pypi = PyPIData(PyPICache())

    async def f(pkg):
        return await evaluate_package_requirements(pkg, PINNED_NIXPKGS_ARGS)

    c = VersionChooser(nixpkgs, pypi, f)
    await c.require(Requirement('pytest'))
    await c.require(Requirement('sampleproject==1.3.1'))
    package: PyPIPackage = c.package_for('sampleproject')  # type: ignore
    sha256 = await get_path_hash(await package.source())
    reqs = ChosenPackageRequirements(
        build_requirements=[],
        test_requirements=[c.package_for('pytest')],  # type: ignore
        runtime_requirements=[c.package_for('peppercorn')]  # type: ignore
    )
    package.version = Version('1.2.3')
    meta = await package.metadata()
    assert package.version == Version('1.3.1')
    sampleproject_expr = build_nix_expression(package, reqs, meta, sha256)

    with tempfile.NamedTemporaryFile(suffix='.nix') as fp:
        fp.write(sampleproject_expr.encode())
        fp.flush()
        nixpkgs_expr = build_overlayed_nixpkgs(
            {'sampleproject': Path(fp.name)})
        print(nixpkgs_expr)
        wrapper_expr = f"""(({nixpkgs_expr}) {{}}).python3.pkgs.sampleproject"""
        result = await run_nix_build(wrapper_expr)

    assert 'default.nix' not in nixpkgs_expr

    proc = await asyncio.create_subprocess_shell(
        f'{result}/bin/sample', stdout=asyncio.subprocess.PIPE)
    stdout, stderr = await proc.communicate()
    assert (await proc.wait()) == 0
    assert b'Call your main application code here' in stdout
Example #14
 def from_result_path(cls, result_path: Path):
     attr_mapping = {
         'build_requirements': Path('setup_requires.txt'),
         'test_requirements': Path('tests_requires.txt'),
         'runtime_requirements': Path('install_requires.txt'),
     }
     kwargs = {}
     for (attr, filename) in attr_mapping.items():
         with (result_path / filename).open() as fp:
             # Convert from Requirement.parse to Requirement
             reqs = [Requirement(str(r)) for r in parse_requirements(fp)]
             kwargs[attr] = reqs
     return cls(**kwargs)
Example #15
def install(packages: List[str],
            install_scripts: str = None,
            upgrade: bool = False,
            latest: bool = False,
            deps: bool = True) -> List[str]:
    """Install packages and return a list of collected package names.
    
    :arg packages: A list of package name, which may include the version specifier. It can also be a URL.
    :arg install_scripts: Install scripts to a different folder. It uses
        the ``--install-option="--install-scripts=..."`` pip option.
    :arg upgrade: Upgrade package.
    :arg latest: Whether upgrade to the latest version. Otherwise upgrade
        to the compatible version. This option has no effect if ``package``
        includes specifiers.
    :arg deps: Whether to install dependencies.
    """
    cmd = "install"

    if install_scripts:
        cmd += " --install-option \"--install-scripts={}\"".format(
            install_scripts)
    if upgrade:
        cmd += " -U --upgrade-strategy eager"
    if not deps:
        cmd += " --no-deps"

    need_info = []
    for i, pkg in enumerate(packages):
        if upgrade and not pkg.startswith(
                "http") and not latest and not Requirement(pkg).specifier:
            # compatible update. find current version
            need_info.append((i, pkg))
    result = show([v for k, v in need_info])
    if len(result) != len(need_info):
        installed = set(r.name for r in result)
        needed = set(v for k, v in need_info)
        missing = list(needed - installed)
        raise Exception(
            f"Upgrade error: some packages are not installed: {', '.join(missing)}"
        )
    for info, (i, pkg) in zip(result, need_info):
        packages[i] = f"{pkg}~={get_compatible_version(info.version)}"

    cmd = f"{cmd} {' '.join(packages)}"
    collected = []
    for line in execute_pip(cmd, capture=True):
        print(line, end="")
        match = re.match("Installing collected packages:(.+)", line, re.I)
        if match:
            collected = [p.strip() for p in match.group(1).split(",")]
    return collected
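get_compatible_version is not defined above; one plausible sketch, assuming a compatible upgrade means staying within the same major release so that "pkg~=X.Y" permits minor and patch updates (the real helper may cut the version differently):

from packaging.version import Version

def get_compatible_version_sketch(version: str) -> str:
    # Sketch only: keep major.minor, so "foo~=1.4" allows 1.4 <= foo < 2.0.
    v = Version(version)
    return "{}.{}".format(v.major, v.minor)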
Example #16
def main(
    filesystem,
    link_dir,
    name,
    locator,
    installs,
    links,
    requirements,
    recreate,
    virtualenv_args,
    persist,
):
    """
    Create a new ad hoc virtualenv.
    """
    if name:
        virtualenv = locator.for_name(name=name)
    elif len(installs) == 1:
        # When there's just one package to install, default to using that name.
        requirement, = installs
        name = Requirement(requirement).name
        virtualenv = locator.for_name(name=name)
    elif installs:
        raise click.BadParameter("A name is required.")
    elif len(links) == 1:
        # When there's just one binary to link, go for the gold.
        name, = installs = links
        virtualenv = locator.for_name(name=name)
    else:
        virtualenv = locator.for_directory(directory=Path.cwd())

    if recreate:
        act = partial(virtualenv.recreate_on, filesystem=filesystem)
    else:
        act = virtualenv.create

    act(arguments=virtualenv_args)
    virtualenv.install(packages=installs, requirements=requirements)

    for link in links:
        filesystem.link(source=virtualenv.binary(name=link),
                        to=link_dir.descendant(link))

    if persist:
        _config.add_virtualenv(
            filesystem=filesystem,
            locator=locator,
            installs=installs,
            links=links,
            name=name,
        )
Example #17
def generate_requirements(
    wheel_file,
    requirements_file,
    extra_packages,
    separate_extras=False,
    third_party_only=False,
):
    metadata = _get_metadata(wheel_file)
    requirements = []
    requirements_extras = defaultdict(list)
    for meta_field in metadata.split("\n"):
        if not meta_field.startswith("Requires-Dist:"):
            continue

        meta_field = meta_field[len("Requires-Dist:"):]
        req = Requirement(meta_field)
        if third_party_only and req.name.startswith("dbnd"):
            print("Skipping %s" % req.name)
            continue

        print(req)
        parts = [req.name]
        if req.extras:
            parts.append("[{0}]".format(",".join(sorted(req.extras))))

        if req.specifier:
            parts.append(str(req.specifier))
        req_str = "".join(parts)
        if req.marker:
            if str(req.marker).startswith("extra"):
                print("Extras marker:", req.marker)
                requirements_extras[str(req.marker)].append(req_str)
                continue
            req_str = "; ".join([req_str, str(req.marker)])
        requirements.append(req_str)

    for extra_name in extra_packages:
        extras = requirements_extras.get('extra == "%s"' % extra_name, [])
        if separate_extras and extras:
            extras_filename = requirements_file + "[%s]" % extra_name
            if ".requirements.txt" in requirements_file:
                extras_filename = requirements_file.replace(
                    ".requirements.txt", "[%s].requirements.txt" % extra_name)

            save_to_file(extras_filename, extras)
            continue
        for p in extras:
            if p not in requirements:
                requirements.append(p)

    save_to_file(requirements_file, requirements)
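save_to_file and _get_metadata are defined elsewhere; a minimal sketch of the former, assuming it simply writes one requirement per line:

def save_to_file_sketch(filename, requirements):
    # Sketch only: persist the requirement strings, one per line.
    with open(filename, "w") as fh:
        fh.write("\n".join(requirements) + "\n")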
Example #18
async def test_require_local_package():
    nixpkgs = NixpkgsData(NIXPKGS_JSON)
    pypi = PyPIData(DummyCache(sampleproject=SAMPLEPROJECT_DATA))
    reqs_f = dummy_package_requirements({
        "sampleproject": ([], [], [Requirement('flask')]),
    })
    c = VersionChooser(nixpkgs, pypi, reqs_f)
    await c.require_local('sampleproject', Path('/src'))
    sampleproject = c.package_for('sampleproject')
    assert sampleproject is not None
    assert isinstance(sampleproject, PyPIPackage)
    assert c.package_for('flask')
    src = await sampleproject.source()
    assert src == Path('/src')
Example #19
async def test_chosen_package_requirements(load_tests, require_pytest):
    nixpkgs = NixpkgsData(NIXPKGS_JSON)
    pypi = PyPIData(DummyCache(sampleproject=SAMPLEPROJECT_DATA))
    reqs_f = dummy_package_requirements({
        "sampleproject": ([], [Requirement('pytest')], [Requirement('flask')]),
    })
    c = VersionChooser(nixpkgs,
                       pypi,
                       reqs_f,
                       should_load_tests=lambda _: load_tests)
    if require_pytest:
        await c.require(Requirement('pytest'))
    await c.require(Requirement('sampleproject'))
    sampleproject = c.package_for('sampleproject')
    reqs: PackageRequirements = await reqs_f(sampleproject)

    chosen: ChosenPackageRequirements
    chosen = ChosenPackageRequirements.from_package_requirements(
        reqs, c, load_tests=load_tests)

    assert len(chosen.runtime_requirements) == 1
    assert len(chosen.test_requirements) == int(load_tests)
    assert chosen.runtime_requirements[0] is c.package_for('flask')
Example #20
def _transform_python_requirements(req_file_contents: Dict, repo_url: str,
                                   out_requirements_files: List[Dict]) -> None:
    """
    Performs data transformations for the requirements.txt files in a GitHub repo, if available.
    :param req_file_contents: Dict: The requirements file object from the GitHub API, with the raw text under its 'text' key.
    :param repo_url: str: The URL of the GitHub repo.
    :param out_requirements_files: Output array to append transformed results to.
    :return: Nothing.
    """
    if req_file_contents and req_file_contents.get('text'):
        text_contents = req_file_contents['text']

        parsed_list = []
        for line in text_contents.split("\n"):
            try:
                # Remove trailing comments and extra whitespace
                line = line.partition('#')[0].strip()
                req = Requirement(line)
                parsed_list.append(req)
            except InvalidRequirement as e:
                logger.info(
                    f"Failed to parse line \"{line}\" in repo {repo_url}'s requirements.txt; skipping line. "
                    f"Details: {e}. This is probably ok since we don't support all ways to specify Python "
                    f"requirements.", )
                continue

        for req in parsed_list:
            pinned_version = None
            if len(req.specifier) == 1:
                specifier = next(iter(req.specifier))
                if specifier.operator == '==':
                    pinned_version = specifier.version

            # Set `spec` to a default value. Example values for str(req.specifier): "<4.0,>=3.0" or "==1.0.0".
            spec: Optional[str] = str(req.specifier)
            # Set spec to `None` instead of empty string so that the Neo4j driver will leave the library.specifier field
            # undefined. As convention, we prefer undefined values over empty strings in the graph.
            if spec == '':
                spec = None

            canon_name = canonicalize_name(req.name)
            requirement_id = f"{canon_name}|{pinned_version}" if pinned_version else canon_name

            out_requirements_files.append({
                "id": requirement_id,
                "name": canon_name,
                "specifier": spec,
                "version": pinned_version,
                "repo_url": repo_url,
            })
Example #21
def tox_tested_django_versions(tox_ini):
    tox_config = tox.config.parseconfig(['-c', str(tox_ini.abspath())])
    tested_versions = set()

    for env, config in tox_config.envconfigs.items():
        for dep in config.deps:
            if requirement_is_django(dep.name):
                req = Requirement(dep.name)
                tested_versions |= {
                    version
                    for version in ['1.8', '1.9', '1.10', '1.11', '2.0']
                    if version in req.specifier
                }
    return tested_versions
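requirement_is_django is not shown above; a plausible sketch, assuming it only checks whether the dependency names the Django project:

from packaging.requirements import InvalidRequirement, Requirement
from packaging.utils import canonicalize_name

def requirement_is_django_sketch(dep_name):
    # Sketch only: treat anything whose project name canonicalizes to "django"
    # as a Django requirement.
    try:
        return canonicalize_name(Requirement(dep_name).name) == "django"
    except InvalidRequirement:
        return False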
Example #22
    def _get_dependencies(self):
        num_of_extras = len(self.extras)

        if num_of_extras == 0:
            # There are no extras, return the top-level requirements
            return self._dependencies[None]
        elif num_of_extras == 1:
            extra_name = list(self.extras)[0]  # XXX: Check if this is _slow_?
            # We are a "simple" extra requirement: depend on the matching package
            # name-version pair and extra dependencies.
            retval = [Requirement("{} == {}".format(self.name, self.version))]
            if extra_name in self._dependencies:
                retval.extend(self._dependencies[extra_name])
            return retval
        else:
            # Shorthands
            name = self.name
            version = self.version

            return [
                Requirement("{}[{}] == {}".format(name, extra_name, version))
                for extra_name in self.extras
            ]
Example #23
async def test_metadata(version_chooser):
    await version_chooser.require(Requirement("sampleproject"))
    desc = "${builtins.abort builtins.currentSystem}"
    result = build_nix_expression(version_chooser.package_for('sampleproject'),
                                  NO_REQUIREMENTS,
                                  sha256='aaaaaa',
                                  metadata=PackageMetadata(
                                      description=desc,
                                      url=None,
                                      license=None,
                                  ))
    assert await nix_instantiate(result,
                                 attr='meta.description',
                                 **DEFAULT_ARGS) == desc
Example #24
    async def add_requirement(self, requirement: str, ctx, transaction):
        """Add a requirement to the transaction.

        See PEP 508 for a description of the requirements.
        https://www.python.org/dev/peps/pep-0508
        """
        if requirement.endswith(".whl"):
            # custom download location
            name, wheel, version = _parse_wheel_url(requirement)
            transaction["wheels"].append((name, wheel, version))
            return

        req = Requirement(requirement)

        # If there's a Pyodide package that matches the version constraint, use
        # the Pyodide package instead of the one on PyPI
        if (
            req.name in self.builtin_packages
            and self.builtin_packages[req.name] in req.specifier
        ):
            version = self.builtin_packages[req.name]
            transaction["pyodide_packages"].append((req.name, version))
            return

        if req.marker:
            # handle environment markers
            # https://www.python.org/dev/peps/pep-0508/#environment-markers
            if not req.marker.evaluate(ctx):
                return

        # Is some version of this package already installed?
        if req.name in transaction["locked"]:
            ver = transaction["locked"][req.name]
            if ver in req.specifier:
                # installed version matches, nothing to do
                return
            else:
                raise ValueError(
                    f"Requested '{requirement}', "
                    f"but {req.name}=={ver} is already installed"
                )
        metadata = await _get_pypi_json(req.name)
        wheel, ver = self.find_wheel(metadata, req)
        transaction["locked"][req.name] = ver

        recurs_reqs = metadata.get("info", {}).get("requires_dist") or []
        for recurs_req in recurs_reqs:
            await self.add_requirement(recurs_req, ctx, transaction)

        transaction["wheels"].append((req.name, wheel, ver))
Example #25
def _generic_dependencies() -> Iterable[Dependency]:
    """Yield pairs (requirement, must_be_installed)."""
    requirements = metadata.requires(DISTRIBUTION_NAME)
    assert requirements is not None
    for raw_requirement in requirements:
        req = Requirement(raw_requirement)
        if _is_dev_dependency(req):
            continue

        # https://packaging.pypa.io/en/latest/markers.html#usage notes that
        #   > Evaluating an extra marker with no environment is an error
        # so we pass in a dummy empty extra value here.
        must_be_installed = req.marker is None or req.marker.evaluate({"extra": ""})
        yield Dependency(req, must_be_installed)
Example #26
def test_format():
    root = RootDependency()
    text = ('hypothesis[django]<=3.0.0; '
            'python_version == "2.7" and '
            'platform_python_implementation == "CPython"')
    req = PackagingRequirement(text)
    deps = DependencyMaker.from_requirement(root, req)

    # test dep
    assert deps[0].name == 'hypothesis'
    assert deps[1].name == 'hypothesis[django]'
    assert str(deps[0].constraint) == '<=3.0.0'
    assert str(deps[0].marker).startswith('python_version == "2.7"')

    # test format
    req = Requirement(dep=deps[0], lock=False)
    req.extra_deps = (deps[1], )
    result = PIPConverter(lock=False)._format_req(req=req)
    assert 'hypothesis[django]' in result
    assert '<=3.0.0' in result
    assert 'python_version == "2.7"' in result
    assert 'from root' not in result
    assert result.startswith(text)
Example #27
    async def add_requirement(self, req: str | Requirement) -> None:
        if isinstance(req, Requirement):
            return await self.add_requirement_inner(req)

        if not req.endswith(".whl"):
            return await self.add_requirement_inner(Requirement(req))

        # custom download location
        wheel = WheelInfo.from_url(req)
        if not wheel.is_compatible():
            raise ValueError(
                f"'{wheel.filename}' is not a pure Python 3 wheel")

        await self.add_wheel(wheel, extras=set())
Example #28
async def test_metadata_with_null_version():
    data = await load_nixpkgs_data(PINNED_NIXPKGS_ARGS)
    nixpkgs = NixpkgsData(data)
    pypi = PyPIData(PyPICache())

    async def f(pkg):
        return await evaluate_package_requirements(pkg, PINNED_NIXPKGS_ARGS)

    c = VersionChooser(nixpkgs, pypi, f)
    await c.require(Requirement('daiquiri==2.1.1'))
    pkg = c.package_for('daiquiri')
    assert pkg is not None
    await pkg.metadata()
    assert pkg.version == Version('2.1.1')
Example #29
def main(
    filesystem,
    link_dir,
    name,
    locator,
    temporary,
    installs,
    links,
    requirements,
    recreate,
    virtualenv_args,
):
    if name:
        if temporary:
            raise click.BadParameter(
                "specify only one of '-t / --temp / --temporary' or 'name'",
            )

        virtualenv = locator.for_name(name=name)
    elif temporary:
        virtualenv = locator.temporary()
        click.echo(virtualenv.binary("python").dirname())
        act = partial(virtualenv.recreate_on, filesystem=filesystem)
    elif len(installs) == 1:
        # When there's just one package to install, default to using that name.
        requirement, = installs
        name = Requirement(requirement).name
        virtualenv = locator.for_name(name=name)
    elif installs:
        raise click.BadParameter("A name is required.")
    elif len(links) == 1:
        # When there's just one binary to link, go for the gold.
        name, = installs = links
        virtualenv = locator.for_name(name=name)
    else:
        virtualenv = locator.for_directory(directory=Path.cwd())

    if recreate or temporary:
        act = partial(virtualenv.recreate_on, filesystem=filesystem)
    else:
        act = virtualenv.create

    act(arguments=virtualenv_args)
    virtualenv.install(packages=installs, requirements=requirements)

    for link in links:
        filesystem.link(
            source=virtualenv.binary(name=link),
            to=link_dir.descendant(link),
        )
Example #30
def _is_mlflow_requirement(requirement_string):
    """
    Returns True if `requirement_string` represents a requirement for mlflow (e.g. 'mlflow==1.2.3').
    """
    try:
        # `Requirement` throws an `InvalidRequirement` exception if `requirement_string` doesn't
        # conform to PEP 508 (https://www.python.org/dev/peps/pep-0508).
        return Requirement(requirement_string).name.lower() == "mlflow"
    except InvalidRequirement:
        # A local file path or URL falls into this branch.

        # TODO: Return True if `requirement_string` represents a project directory for MLflow
        # (e.g. '/path/to/mlflow') or git repository URL (e.g. 'https://github.com/mlflow/mlflow').
        return False
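Illustrative outcomes, assuming _is_mlflow_requirement is importable:

assert _is_mlflow_requirement("mlflow==1.2.3") is True
assert _is_mlflow_requirement("MLflow[extras]>=1.0") is True    # the name check is case-insensitive
assert _is_mlflow_requirement("scikit-learn==1.0") is False
assert _is_mlflow_requirement("/path/to/mlflow") is False       # not valid PEP 508; see the except branch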