Esempio n. 1
0
 def test_complex_url_and_marker(self):
     """A direct-URL requirement whose URL contains path parameters, a
     query string, and a fragment still parses together with a trailing
     environment marker."""
     complex_url = "https://example.com/name;v=1.1/?query=foo&bar=baz#blah"
     parsed = Requirement("foo @ %s ; python_version=='3.4'" % complex_url)
     self._assert_requirement(
         parsed, "foo", url=complex_url, marker='python_version == "3.4"')
Esempio n. 2
0
 def test_types(self):
     """Every attribute of a fully-specified requirement carries its
     documented concrete type."""
     parsed = Requirement("foobar[quux]<2,>=3; os_name=='a'")
     for attribute, expected_type in (
             ("name", str),
             ("extras", set),
             ("specifier", SpecifierSet),
             ("marker", Marker),
     ):
         assert isinstance(getattr(parsed, attribute), expected_type)
     assert parsed.url is None
Esempio n. 3
0
def collate_python_requirements(modules):
    # type: (List[libtbx.env_config.module]) -> List[packaging.requirements.Requirement]
    """Combine python requirements from a module list.

    An attempt will be made to merge any joint requirements. Each returned
    requirement object carries an added ``modules`` attribute: the set of
    module names that contributed to it.

    Args:
        modules (Iterable[libtbx.env_config.module]): The module list

    Returns:
        List[packaging.requirements.Requirement]: The merged requirements
    """
    merged = []
    for module in modules:
        # Modules without a python_required attribute contribute nothing.
        if not hasattr(module, "python_required"):
            continue
        for spec in module.python_required:
            requirement = Requirement(spec)
            # Track where this dependency came from
            requirement.modules = {module.name}
            # Fold into any matching requirement already collected, to
            # avoid double-specifying the same package.
            _merge_requirements(merged, requirement)
    return merged
Esempio n. 4
0
def update_check_dependencies_at(path, dependencies):
    """Rewrite the project file's optional-dependency groups at *path*.

    Each entry whose normalized name appears in *dependencies* is replaced
    by the full pinned set from *dependencies*; unmatched entries are kept
    verbatim. The file is written back only if something changed.

    Returns:
        bool: True when the project file was modified.
    """
    project_data = deepcopy(load_project_file_at_cached(path))
    optional_dependencies = project_data['project'].get('optional-dependencies', {})

    updated = False
    for old_dependencies in optional_dependencies.values():
        replacement = defaultdict(set)

        for entry in old_dependencies:
            requirement = Requirement(entry)
            name = normalize_project_name(requirement.name)
            if name in dependencies:
                # Known package: substitute the managed dependency set(s).
                for dependency_set in dependencies[name].values():
                    replacement[name].update(dependency_set)
            else:
                # Unknown package: keep the original entry untouched.
                replacement[name].add(entry)

        flattened = sorted(d for group in replacement.values() for d in group)
        if flattened != old_dependencies:
            updated = True
            # Mutate in place so project_data keeps referencing this list.
            old_dependencies[:] = flattened

    if updated:
        write_project_file_at(path, project_data)

    return updated
Esempio n. 5
0
def dependencies(name: str) -> Iterable[str]:
    """Return the names of all packages that the given package depends on,
    directly or indirectly.

    If metadata for any package is not available, its dependencies will
    not be in the output, but the name of the missing package will be.
    Requirements conditional on e.g. Python version or OS are evaluated
    for the current process.
    """
    visited = set()
    # Work queue of (package name, extra) pairs still to expand.
    pending = {(name, None)}
    while pending:
        pair = pending.pop()
        visited.add(pair)
        pkg_name, pkg_extra = pair

        dist = getDistribution(pkg_name)
        if dist is None:
            # Metadata missing: the name stays in the result, but its
            # transitive dependencies cannot be discovered.
            continue

        env = {"extra": pkg_extra}
        for req_str in dist.requires or ():
            req = Requirement(req_str)
            marker = req.marker
            if marker is not None and not marker.evaluate(environment=env):
                continue
            # Queue the plain package plus each of its requested extras.
            for extra in {None} | req.extras:
                candidate = (req.name, extra)
                if candidate not in visited:
                    pending.add(candidate)

    return {n for n, _ in visited if n != name}
Esempio n. 6
0
async def get_package_from_pypi(package_name, plugin_path):
    """
    Download a package from PyPI into *plugin_path*/dist.

    :param package_name: name (PEP 508 spec) of the package to download
    :param plugin_path: working directory used as the bandersnatch mirror root
    :return: True when at least one package file was downloaded and moved
    """
    config = BandersnatchConfig().config
    config["mirror"]["master"] = "https://pypi.org"
    config["mirror"]["workers"] = "1"
    config["mirror"]["directory"] = plugin_path
    # Make sure both plugin-related sections exist before writing keys.
    for section in ("plugins", "allowlist"):
        if not config.has_section(section):
            config.add_section(section)
    # Enable the release blocklist plus allowlist filtering plugins.
    config["plugins"]["enabled"] = "blocklist_release\n"
    config["plugins"]["enabled"] += "allowlist_release\nallowlist_project\n"
    config["allowlist"]["packages"] = "\n".join([package_name])
    os.makedirs(os.path.join(plugin_path, "dist"), exist_ok=True)
    async with Master("https://pypi.org/") as master:
        mirror = BandersnatchMirror(homedir=plugin_path, master=master)
        project = Requirement(package_name).name
        result = await mirror.synchronize([project])

    found = False
    for rel_path in result[project]:
        source = os.path.join(plugin_path, rel_path)
        destination = os.path.join(plugin_path, "dist",
                                   os.path.basename(rel_path))
        shutil.move(source, destination)
        found = True
    return found
Esempio n. 7
0
 def test_parseexception_error_msg(self):
     """The InvalidRequirement message names the unparsed tail; the exact
     wording differs between pyparsing versions."""
     with pytest.raises(InvalidRequirement) as e:
         Requirement("toto 42")
     message = str(e.value)
     # pyparsing < 3 reports "Expected stringEnd"; >= 3 "Expected string_end"
     assert "Expected stringEnd" in message or "Expected string_end" in message
Esempio n. 8
0
    def test_sys_platform_linux_equal(self):
        """An equality sys_platform marker evaluates per supplied environment."""
        requirement = Requirement('something>=1.2.3; sys_platform == "foo"')

        assert requirement.name == "something"
        marker = requirement.marker
        assert marker is not None
        assert marker.evaluate(dict(sys_platform="foo")) is True
        assert marker.evaluate(dict(sys_platform="bar")) is False
Esempio n. 9
0
    def check_bblfsh_driver_versions(self, versions: Iterable[str]) -> None:
        """
        Ensure that the Babelfish drivers match the required versions.

        The check is performed by `packaging.version`.

        :param versions: setup.py-like version specifiers, e.g. "javascript==1.3.0".
        :return: Nothing
        :raise UnsatisfiedDriverVersionError: if there is one or more mismatches.
        """
        langs = self.get_bblfsh().SupportedLanguages(
            bblfsh.aliases.SupportedLanguagesRequest()).languages
        # Map each installed driver language to its parsed version.
        installed = {driver.language: Version(driver.version) for driver in langs}
        problems = []
        for reqstr in versions:
            req = Requirement(reqstr)
            if req.name not in installed:
                problems.append(
                    (req.name,
                     "not installed, but required %s" % req.specifier))
                continue
            version = installed[req.name]
            if version not in req.specifier:
                problems.append(
                    (req.name,
                     "%s does not satisfy %s" % (version, req.specifier)))
        if problems:
            raise UnsatisfiedDriverVersionError(problems)
Esempio n. 10
0
async def test_compiles(version_chooser):
    """A freshly required package produces a syntactically valid Nix expression."""
    await version_chooser.require(Requirement("sampleproject"))
    package = version_chooser.package_for('sampleproject')
    expression = build_nix_expression(
        package, NO_REQUIREMENTS, NO_METADATA, sha256='aaaaaa')
    assert await is_valid_nix(expression), "Invalid Nix expression"
Esempio n. 11
0
async def test_download_from_pypi():
    """A mixed-case/underscore spec resolves to exactly one PyPI package
    with the expected sdist checksum, and its source can be fetched."""
    data = PyPIData(PyPICache())
    requirement = Requirement('faraday-agent_dIspatcher==1.0')
    (package, ) = await data.from_requirement(requirement)
    print(package)
    expected = 'd7a549af9047f1af0d824cf29dd93c5449241266a24e8687d3399cb15bfc61ae'
    assert package.sha256 == expected
    # Downloading must succeed; the resulting path itself is not inspected.
    await package.source()
Esempio n. 12
0
def test_last_version_from_pypi():
    """find_wheel selects the highest release from PyPI-style JSON metadata."""
    pytest.importorskip("packaging")
    from packaging.requirements import Requirement

    from micropip._micropip import find_wheel

    requirement = Requirement("dummy_module")

    # Metadata shaped like https://pypi.org/pypi/{pkgname}/json
    releases = {
        version: [{
            "filename": f"dummy_module-{version}-py3-none-any.whl",
            "url": f"dummy_module-{version}-py3-none-any.whl",
            "digests": None,
        }]
        for version in ["0.0.1", "0.15.5", "0.9.1"]
    }

    wheel = find_wheel({"releases": releases}, requirement)

    # 0.15.5 is the numerically largest of the three versions.
    assert str(wheel.version) == "0.15.5"
Esempio n. 13
0
def split_requirement(requirement: str) -> t.Tuple[str, str]:
    """
    Split requirements. 'bentoml>=1.0.0' -> ['bentoml', '>=1.0.0']

    Dashes in the project name are normalized to underscores.
    """
    parsed = Requirement(requirement)
    return parsed.name.replace("-", "_"), str(parsed.specifier)
Esempio n. 14
0
def get_requirements(package):
    """
    This wraps `importlib.metadata.requires` to not be garbage.

    Parameters
    ----------
    package : str
        Package you want requirements for.

    Returns
    -------
    `dict`
        A dictionary of requirements with keys being the extra requirement group names.
        The values are a nested dictionary with keys being the package names and
        values being the `packaging.requirements.Requirement` objects.
    """
    requires_dict = defaultdict(dict)
    for requirement in requires(package):
        req = Requirement(requirement)
        marker_text = str(req.marker) if req.marker else ""
        if "extra ==" in marker_text:
            # Pull the extra's name out of the marker text.
            group = marker_text.split("extra == ")[1].strip('"').strip(
                "'").strip()
        else:
            group = "required"
        # setdefault de-duplicates: the same package may appear more than
        # once within a group (e.g. the extra == 'all' group).
        requires_dict[group].setdefault(req.name, req)
    return requires_dict
Esempio n. 15
0
    def _validate_plugins(self) -> None:
        """Abort the run when any ini-declared required plugin is absent,
        unparseable, or installed at a version outside its specifier."""
        required_plugins = sorted(self.getini("required_plugins"))
        if not required_plugins:
            return

        # Imported lazily to improve start-up time.
        from packaging.version import Version
        from packaging.requirements import InvalidRequirement, Requirement

        installed = {
            dist.project_name: dist.version
            for _, dist in self.pluginmanager.list_plugin_distinfo()
        }

        missing = []
        for entry in required_plugins:
            try:
                req = Requirement(entry)
            except InvalidRequirement:
                # Not a valid PEP 508 spec: report it verbatim as missing.
                missing.append(entry)
                continue

            if req.name not in installed:
                missing.append(entry)
            elif Version(installed[req.name]) not in req.specifier:
                missing.append(entry)

        if missing:
            fail(
                "Missing required plugins: {}".format(", ".join(missing)),
                pytrace=False,
            )
Esempio n. 16
0
 def test_types_with_url(self):
     """A URL-based requirement exposes a string url and no marker."""
     parsed = Requirement("foobar @ http://foo.com")
     for attribute, expected_type in (
             ("name", str),
             ("extras", set),
             ("url", str),
             ("specifier", SpecifierSet),
     ):
         assert isinstance(getattr(parsed, attribute), expected_type)
     assert parsed.marker is None
Esempio n. 17
0
 def get_package_dependencies(self, extras=None) -> List[Requirement]:
     """Return the distribution's requirements filtered by *extras*.

     A requirement is kept when it carries no ``extra == "<name>"`` marker
     clause, or when the named extra is in *extras* (default: empty set).
     For kept requirements the matched extra clause is deleted in place
     from the marker's private ``_markers`` list; a marker left empty is
     replaced with ``None``.
     """
     self._ensure_meta_present()
     if extras is None:
         extras = set()
     result = []
     requires = self._distribution_meta.requires or []
     for v in requires:
         req = Requirement(v)
         # packaging's private marker representation: a flat list mixing
         # (variable, op, value) triples with "and"/"or" connector strings.
         markers = getattr(req.marker, "_markers", ()) or ()
         # Locate the first `extra == "<name>"` triple and remember its index.
         for _at, (m_key, op,
                   m_val) in ((j, i) for j, i in enumerate(markers)
                              if isinstance(i, tuple) and len(i) == 3):
             if m_key.value == "extra" and op.value == "==":
                 extra = m_val.value
                 break
         else:
             extra, _at = None, None
         if extra is None or extra in extras:
             if _at is not None:
                 # noinspection PyProtectedMember
                 del markers[_at]
                 # Also drop the boolean connector that preceded the
                 # deleted clause, if any.
                 _at -= 1
                 # NOTE(review): when the extra clause was the FIRST marker
                 # element, _at becomes -1 here, so a connector *after* the
                 # clause would be left dangling — confirm such marker
                 # orderings cannot occur in practice.
                 if _at > 0 and markers[_at] in ("and", "or"):
                     del markers[_at]
                 # noinspection PyProtectedMember
                 if len(markers) == 0:
                     req.marker = None
             result.append(req)
     return result
Esempio n. 18
0
    def test_sys_platform_linux_in(self):
        """A substring (`in sys_platform`) marker evaluates per environment."""
        requirement = Requirement("aviato>=1.2.3; 'f' in sys_platform")

        assert requirement.name == "aviato"
        marker = requirement.marker
        assert marker is not None
        assert marker.evaluate(dict(sys_platform="foo")) is True
        assert marker.evaluate(dict(sys_platform="bar")) is False
Esempio n. 19
0
 def _determine_filtered_package_names(self) -> List[str]:
     """
     Build the list of blocklisted package names from the configuration.

     Lines carrying a PEP440 specifier are handled by the blocklist
     release filter instead, so only bare package names are kept here.
     """
     filtered: Set[str] = set()
     try:
         package_lines = self.blocklist["packages"].split("\n")
     except KeyError:
         package_lines = []
     for raw in package_lines:
         line = raw.strip()
         # Skip blanks and comment lines.
         if not line or line.startswith("#"):
             continue
         requirement = Requirement(line)
         # A specifier means the blocklist release filter owns this line.
         if requirement.specifier:
             continue
         if requirement.name != line:
             logger.debug(
                 "Package line %r does not match requirement name %r",
                 line,
                 requirement.name,
             )
             continue
         filtered.add(canonicalize_name(requirement.name))
     logger.debug("Project blocklist is %r", list(filtered))
     return list(filtered)
Esempio n. 20
0
def requirement_is_django(req):
    """Return True when *req* (a Requirement or a PEP 508 string) names
    Django; invalid requirement strings yield False."""
    try:
        parsed = Requirement(req) if isinstance(req, six.string_types) else req
        return parsed.name.lower() == 'django'
    except InvalidRequirement:
        return False
Esempio n. 21
0
 async def run(self):
     """
     Mirror python project metadata from the remote via Bandersnatch.

     If ``includes`` is specified, then only those projects are synced;
     else all packages on the remote index are synced.
     """
     # TODO Change Bandersnatch internal API to take proxy settings in from config parameters
     if self.remote.proxy_url:
         environ['http_proxy'] = self.remote.proxy_url
     # local & global timeouts defaults to 10secs and 5 hours
     async with Master(self.remote.url) as master:
         # Any policy other than IMMEDIATE defers artifact downloads.
         deferred_download = self.remote.policy != Remote.IMMEDIATE
         workers = self.remote.download_concurrency or self.remote.DEFAULT_DOWNLOAD_CONCURRENCY
         async with ProgressReport(message="Fetching Project Metadata",
                                   code="sync.fetching.project") as p:
             pmirror = PulpMirror(
                 serial=0,  # Serial currently isn't supported by Pulp
                 master=master,
                 workers=workers,
                 deferred_download=deferred_download,
                 python_stage=self,
                 progress_report=p,
             )
             # None means "sync everything" for PulpMirror.synchronize.
             packages_to_sync = None
             if self.remote.includes:
                 # Reduce each include line (a PEP 508 string) to its bare
                 # project name; specifiers/markers are ignored here.
                 packages_to_sync = [
                     Requirement(pkg).name for pkg in self.remote.includes
                 ]
             await pmirror.synchronize(packages_to_sync)
Esempio n. 22
0
def get_requirements(lines):
    """Parse PEP 508 requirement lines into a dict of pkg_name -> version.

    Comments and empty lines are skipped, trailing comments stripped.
    Requirements whose environment marker does not match the hardcoded
    python 2.7 / linux environment are dropped.  For each remaining
    requirement the lowest allowed version is recorded; ``!=`` pins are
    ignored when searching for the minimum.

    :param lines: iterable of requirement strings following PEP 508
    :return: dict mapping package name to its minimum required version string
    """
    # Local import: packaging is already a dependency (Requirement), and
    # this keeps the fix free of new module-level imports.
    from packaging.version import parse as parse_version

    requires = {}
    for line in lines:
        # skip comments and empty lines
        if line.startswith('#') or len(line.strip()) == 0:
            continue
        # remove trailing comments
        line = line.split('#')[0].rstrip(' ')
        r = Requirement(line)
        # check if we need the requirement
        if r.marker:
            # TODO (toabctl): currently we hardcode python 2.7 and linux2
            # see https://www.python.org/dev/peps/pep-0508/#environment-markers
            marker_env = {'python_version': '2.7', 'sys_platform': 'linux'}
            if not r.marker.evaluate(environment=marker_env):
                continue
        if r.specifier:
            # we want the lowest possible version
            # NOTE(toabctl): "min(r.specifier)" doesn't work.
            # see https://github.com/pypa/packaging/issues/69
            lowest = None
            for s in r.specifier:
                # we don't want a lowest version which is not allowed
                if s.operator == '!=':
                    continue
                # BUG FIX: the versions are strings, so a plain `<` compared
                # them lexicographically (e.g. "10.0" < "9.0" was True);
                # compare parsed versions instead.
                if lowest is None or parse_version(s.version) < parse_version(
                        lowest.version):
                    lowest = s

            if lowest:
                requires[r.name] = lowest.version
    return requires
Esempio n. 23
0
 def test_types_with_nothing(self):
     """A bare package name still yields fully-typed (mostly empty) attributes."""
     parsed = Requirement("foobar")
     for attribute, expected_type in (
             ("name", str),
             ("extras", set),
             ("specifier", SpecifierSet),
     ):
         assert isinstance(getattr(parsed, attribute), expected_type)
     assert parsed.url is None
     assert parsed.marker is None
Esempio n. 24
0
def remove_unisolated_requirements(requires: set[str]) -> set[str]:
    """Remove requirements matching UNISOLATED_PACKAGES from *requires*.

    A requirement string is dropped when any ``UNISOLATED_PACKAGES`` entry
    is a substring of its parsed project name.  The set is mutated in place
    and also returned for convenience.

    Bug fix: the original removed the entry once per matching avoid-name,
    which raised KeyError whenever two UNISOLATED_PACKAGES entries matched
    the same requirement; matching now short-circuits with any().
    """
    for reqstr in list(requires):
        req_name = Requirement(reqstr).name
        if any(avoid_name in req_name for avoid_name in UNISOLATED_PACKAGES):
            requires.remove(reqstr)
    return requires
Esempio n. 25
0
    async def get_dependencies(self, name: str, version: str,
                               extra: Optional[str] = None) -> Tuple[Requirement, ...]:
        """Fetch and cache the dependencies of ``name==version``, filtered
        so that only deps matching *extra* (or unconditional deps when
        *extra* is None) are returned.

        Raises ValueError when a dependency string cannot be parsed even
        after dropping its marker part.
        """
        cache = TextCache('deps', name, str(version))
        deps = cache.load()
        if deps is None:
            # Cache miss: fetch from the JSON API and persist the raw list.
            task = self._get_from_json(name=name, version=version)
            deps = await asyncio.gather(asyncio.ensure_future(task))
            deps = deps[0]
            cache.dump(deps)
        elif deps == ['']:
            # Cached sentinel for "no dependencies".
            return ()

        # filter result
        result = []
        for dep in deps:
            try:
                req = Requirement(dep)
            except InvalidRequirement as e:
                msg = 'cannot parse requirement: {} from {} {}'
                try:
                    # try to parse with dropped out markers
                    req = Requirement(dep.split(';')[0])
                except InvalidRequirement:
                    raise ValueError(msg.format(dep, name, version)) from e
                else:
                    msg = 'cannot parse requirement: "{}" from {} {}'
                    logger.warning(msg.format(dep, name, version))

            try:
                # Markers(...).extra extracts the extra name guarding this
                # dependency, if any.
                dep_extra = req.marker and Markers(req.marker).extra
            except ValueError:  # unsupported operation for version marker python_version: in
                dep_extra = None

            # it's not extra and we want not extra too
            if dep_extra is None and extra is None:
                result.append(req)
                continue
            # it's extra, but we want not the extra
            # or it's not the extra, but we want extra.
            if dep_extra is None or extra is None:
                continue
            # it's extra and we want this extra
            elif dep_extra == extra:
                result.append(req)
                continue

        return tuple(result)
Esempio n. 26
0
 def parse_file(self) -> Generator[Requirement, None, None]:
     """Yield a Requirement for every non-empty, non-comment line of
     ``self.file``, tagging each with its 1-based line number as ``lino``."""
     with open(self.file, 'r') as handle:
         for lineno, raw in enumerate(handle, start=1):
             stripped = raw.strip()
             if not is_comment(stripped) and stripped:
                 requirement = Requirement(stripped)
                 requirement.lino = lineno
                 yield requirement
Esempio n. 27
0
 def cached_install(self, deps, section, of_type):
     """Install only dependencies not already recorded in the cache.

     Returns True when the cache already matches (nothing to do), False
     after installing the new entries.  Raises Recreate when previously
     installed deps have disappeared, since transitive uninstalls cannot
     be computed here.
     """
     conf_deps = [str(i) for i in deps]
     with self._cache.compare(conf_deps, section, of_type) as (eq, old):
         if eq is True:
             return True
         old = old or []
         removed = [Requirement(i) for i in (set(old) - set(conf_deps))]
         # no way yet to know what to uninstall here (transitive
         # dependencies?) - bail out and force recreate
         if removed:
             raise Recreate()
         added = set(conf_deps) - set(old)
         self.install_python_packages(packages=[Requirement(i) for i in added])
     return False
Esempio n. 28
0
def get_deps():
    """Yield dependency objects built from every project row served at URL."""
    root = RootDependency()
    rows = requests.get(URL).json()['rows']
    for row in rows:
        yield from DependencyMaker.from_requirement(
            source=root,
            req=Requirement(row['project']),
        )
Esempio n. 29
0
def valid_pypi_name(package_spec: str) -> Optional[str]:
    """Return the project name parsed from *package_spec*, or None when
    the spec is not a valid PEP 508 package specification."""
    try:
        return Requirement(package_spec).name
    except InvalidRequirement:
        return None
Esempio n. 30
0
async def test_chosen_package_requirements_marker():
    """Requirements guarded by a false marker (python_version<'3') must be
    excluded from the chosen runtime requirements."""
    nixpkgs = NixpkgsData(NIXPKGS_JSON)
    pypi = PyPIData(DummyCache(sampleproject=SAMPLEPROJECT_DATA))
    marked_req = Requirement("notexistent; python_version<'3'")
    reqs_f = dummy_package_requirements({
        "sampleproject": ([marked_req], [], [marked_req]),
    })
    chooser = VersionChooser(nixpkgs, pypi, reqs_f)
    await chooser.require(Requirement('sampleproject'))
    package = chooser.package_for('sampleproject')
    reqs: PackageRequirements = await reqs_f(package)

    chosen: ChosenPackageRequirements
    chosen = ChosenPackageRequirements.from_package_requirements(
        reqs, chooser, load_tests=True)

    assert len(chosen.runtime_requirements) == 0