Example No. 1
import importlib.metadata as pkgmd  # assumed alias for the otherwise-undefined pkgmd


def _package_deps(package, deps=None, ignore=()):
    """Recursively gather a package's named transitive dependencies."""
    if deps is None:
        deps = []
    try:
        pdeps = pkgmd.requires(package) or ()
    except pkgmd.PackageNotFoundError:
        return deps
    for r in pdeps:
        # skip optional deps
        if ';' in r and 'extra' in r:
            continue
        # take the leading run of name characters as the package name
        for idx, c in enumerate(r):
            if not c.isalnum() and c not in ('-', '_', '.'):
                break
        if idx + 1 == len(r):
            idx += 1
        pkg_name = r[:idx]
        if pkg_name in ignore:
            continue
        if pkg_name not in deps:
            try:
                _package_deps(pkg_name, deps, ignore)
            except pkgmd.PackageNotFoundError:
                continue
            deps.append(pkg_name)
    return deps
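A minimal usage sketch for the helper above, assuming `pkgmd` is `importlib.metadata` and using an illustrative installed distribution name:

# Usage sketch: "requests" is only an illustrative, commonly installed package.
deps = _package_deps("requests", ignore=("certifi",))
print(deps)  # flat list of transitive dependency names, e.g. ['idna', 'urllib3', ...]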
Example No. 2
from collections import defaultdict

from importlib.metadata import requires
from packaging.requirements import Requirement


def get_requirements(package):
    """
    Wrap `importlib.metadata.requires` into a friendlier, grouped structure.

    Parameters
    ----------
    package : str
        Package you want requirements for.

    Returns
    -------
    `dict`
        A dictionary of requirements with keys being the extra requirement group names.
        The values are a nested dictionary with keys being the package names and
        values being the `packaging.requirements.Requirement` objects.
    """
    requirements: list = requires(package)
    requires_dict = defaultdict(dict)
    for requirement in requirements:
        req = Requirement(requirement)
        package_name, package_marker = req.name, req.marker
        if package_marker and "extra ==" in str(package_marker):
            group = str(package_marker).split("extra == ")[1].strip('"').strip(
                "'").strip()
        else:
            group = "required"
        # De-duplicate (the same package could appear more than once in the extra == 'all' group)
        if package_name in requires_dict[group]:
            continue
        requires_dict[group][package_name] = req
    return requires_dict
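A usage sketch for the wrapper above; the distribution name is illustrative and the printed groups depend on what that package declares:

reqs = get_requirements("astropy")   # any installed distribution name works here
print(sorted(reqs))                  # e.g. ['docs', 'recommended', 'required', 'test']
for name, req in reqs["required"].items():
    print(name, req.specifier)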
Example No. 3
def about(file: typing.TextIO = None) -> None:
    f"""Print information about the package

     ``> python -m {__package__}.about``

    Args:
        file: Output stream (Defaults to stdout)
    """
    metadata = importlib_metadata.metadata(__package__)  # type: ignore
    print(f"# {metadata['Name']}", file=file)
    print(f"{metadata['Summary']}", file=file)
    print(f"{metadata['Home-page']}", file=file)

    name_width = 24
    versions = {}
    versions["platform"] = platform.platform(aliased=True)
    versions[__package__] = __version__
    versions["python"] = sys.version[0:5]

    for req in importlib_metadata.requires(__package__):  # type: ignore
        name = re.split("[; =><]", req)[0]
        try:
            versions[name] = importlib_metadata.version(name)  # type: ignore
        except Exception:  # pragma: no cover
            pass

    print(file=file)
    print("# Configuration", file=file)
    for name, vers in versions.items():
        print(name.ljust(name_width), vers, file=file)
    print(file=file)
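A sketch of calling `about` with an explicit stream instead of stdout (it relies on the surrounding module's `__package__` and `__version__`):

import io

buffer = io.StringIO()
about(file=buffer)       # collect the report instead of printing it
print(buffer.getvalue())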
Example No. 4
 def __init__(self, name, path=None, file_name=None, parent=None):
     self.name = name
     self.file = file_name
     self.path = path
     self.code = None
     self.parent = parent
     self.global_names = set()
     self.exclude_names = set()
     self.ignore_names = set()
     self.source_is_zip_file = False
     self.in_import = True
     self.store_in_file_system = True
     # distribution files (metadata)
     dist_files = []
     packages = [name.replace(".", "-")]
     try:
         requires = importlib_metadata.requires(packages[0])
     except importlib_metadata.PackageNotFoundError:
         requires = None
     if requires is not None:
         packages += [req.partition(" ")[0] for req in requires]
     for package_name in packages:
         try:
             files = importlib_metadata.files(package_name)
         except importlib_metadata.PackageNotFoundError:
             files = None
         if files is not None:
             # cache file names to use in write modules
             for file in files:
                 if not file.match('*.dist-info/*'):
                     continue
                 dist_path = str(file.locate())
                 arc_path = file.as_posix()
                 dist_files.append((dist_path, arc_path))
     self.dist_files = dist_files
Example No. 5
import re
from typing import Dict

from importlib.metadata import PackageNotFoundError, requires, version


def _plugin_dist_dependencies(plugin_dist_name: str) -> Dict[str, str]:
    """Return an overview of all dependencies (direct and indirect) of a given
    plugin project installed in the current environment.

    Key is package name of dependency, value is (installed) version string.
    """

    untraversed_dependencies = set([plugin_dist_name])
    requirements = {}

    while untraversed_dependencies:
        sub_dependencies = requires(untraversed_dependencies.pop())

        if sub_dependencies is None:
            continue

        for sub_dependency in sub_dependencies:
            split = re.split(r"[;<>~=()]", sub_dependency, 1)
            package_name = split[0].strip().replace("_", "-").lower()

            if package_name not in requirements:
                # Only include package in dependency list
                # if it is not an "extra" dependency...
                if len(split) == 1 or "extra" not in split[1]:
                    try:
                        # ...and if it is actually installed (there are dependencies
                        # in setup.py that e.g. are not installed on certain Python
                        # versions and operating system combinations).
                        requirements[package_name] = version(package_name)
                        untraversed_dependencies.add(package_name)
                    except PackageNotFoundError:
                        pass

    return {k: requirements[k] for k in sorted(requirements)}
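Usage sketch for the traversal above; the plugin distribution name is hypothetical:

deps = _plugin_dist_dependencies("my-plugin-dist")  # hypothetical distribution name
for name, installed_version in deps.items():
    print(f"{name}=={installed_version}")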
Example No. 6
def get_requirements(package):
    """
    This wraps `importlib.metadata.requires` to not be garbage.

    Parameters
    ----------
    package : str
        Package you want requirements for.

    Returns
    -------
    `dict`
        A dictionary of requirements with keys being the extra requirement group names.
    """
    requirements: list = requires(package)
    requires_dict = defaultdict(list)
    for requirement in requirements:
        req = Requirement(requirement)
        package_name, package_marker = req.name, req.marker
        if package_marker and "extra ==" in str(package_marker):
            group = str(package_marker).split("extra == ")[1].strip('"').strip(
                "'").strip()
            requires_dict[group].append(package_name)
        else:
            requires_dict["required"].append(package_name)
    return requires_dict
Example No. 7
 def test_requires_egg_info_empty(self):
     fixtures.build_files(
         {
             'requires.txt': '',
         },
         self.site_dir.joinpath('egginfo_pkg.egg-info'),
     )
     deps = requires('egginfo-pkg')
     assert deps == []
Example No. 8
from typing import List

from importlib.metadata import requires


def get_additional_deps_for_extra(extra_name: str) -> List[str]:
    all_requirements = requires("acryl-datahub") or []
    # filter for base dependencies
    base_deps = set(
        [x.split(";")[0] for x in all_requirements if "extra ==" not in x])
    # filter for dependencies for this extra
    extra_deps = set([
        x.split(";")[0] for x in all_requirements
        if f'extra == "{extra_name}"' in x
    ])
    # calculate additional deps that this extra adds
    delta_deps = extra_deps - base_deps
    return list(delta_deps)
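Usage sketch; assumes acryl-datahub is installed and the extra name is illustrative:

extra_only = get_additional_deps_for_extra("bigquery")  # illustrative extra name
print(sorted(extra_only))  # packages pulled in by the extra beyond the base install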
Example No. 9
def get_cirrus_lib_requirements() -> List[str]:
    '''
    Get the cirrus-lib dependencies.
    '''
    try:
        from importlib import metadata
    except ImportError:
        import importlib_metadata as metadata

    return [
        # drop any environment marker, then strip spaces and parentheses
        # from old-style "name (specifier)" requirement strings
        req.split(';')[0].translate(str.maketrans('', '', ' ()'))
        for req in metadata.requires('cirrus-lib')
    ]
Example No. 10
def _generic_dependencies() -> Iterable[Dependency]:
    """Yield pairs (requirement, must_be_installed)."""
    requirements = metadata.requires(DISTRIBUTION_NAME)
    assert requirements is not None
    for raw_requirement in requirements:
        req = Requirement(raw_requirement)
        if _is_dev_dependency(req):
            continue

        # https://packaging.pypa.io/en/latest/markers.html#usage notes that
        #   > Evaluating an extra marker with no environment is an error
        # so we pass in a dummy empty extra value here.
        must_be_installed = req.marker is None or req.marker.evaluate({"extra": ""})
        yield Dependency(req, must_be_installed)
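A small standalone sketch of the marker-evaluation trick used above (the requirement strings are illustrative):

from packaging.requirements import Requirement

req = Requirement('pytest; extra == "test"')
print(req.marker.evaluate({"extra": ""}))      # False: only needed with the "test" extra
print(req.marker.evaluate({"extra": "test"}))  # True

base = Requirement("attrs>=21.0")
print(base.marker is None)                     # True: unconditionally required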
Example No. 11
def _dependencies_for_extra(extra: str) -> Iterable[Dependency]:
    """Yield additional dependencies needed for a given `extra`."""
    requirements = metadata.requires(DISTRIBUTION_NAME)
    assert requirements is not None
    for raw_requirement in requirements:
        req = Requirement(raw_requirement)
        if _is_dev_dependency(req):
            continue
        # Exclude mandatory deps by only selecting deps needed with this extra.
        if (
            req.marker is not None
            and req.marker.evaluate({"extra": extra})
            and not req.marker.evaluate({"extra": ""})
        ):
            yield Dependency(req, True)
Example No. 12
def gui(config_name: str) -> None:

    from packaging.version import Version
    from packaging.requirements import Requirement

    try:
        from importlib.metadata import entry_points, requires, version  # type: ignore
    except ImportError:
        from importlib_metadata import entry_points, requires, version  # type: ignore

    # find all "maestral_gui" entry points registered by other packages
    gui_entry_points = entry_points().get("maestral_gui")

    if not gui_entry_points or len(gui_entry_points) == 0:
        raise cli.CliException(
            "No maestral GUI installed. Please run 'pip3 install maestral[gui]'."
        )

    # check if 1st party defaults "maestral_cocoa" or "maestral_qt" are installed
    default_gui = "maestral_cocoa" if sys.platform == "darwin" else "maestral_qt"
    default_entry_point = next(
        (e for e in gui_entry_points if e.name == default_gui), None)

    if default_entry_point:
        # check gui requirements
        requirements = [Requirement(r)
                        for r in requires("maestral")]  # type: ignore

        for r in requirements:
            if r.marker and r.marker.evaluate({"extra": "gui"}):
                version_str = version(r.name)
                if not r.specifier.contains(Version(version_str),
                                            prereleases=True):
                    raise cli.CliException(
                        f"{r.name}{r.specifier} required but you have {version_str}"
                    )

        # load entry point
        run = default_entry_point.load()

    else:
        # load any 3rd party GUI
        fallback_entry_point = next(iter(gui_entry_points))
        run = fallback_entry_point.load()

    run(config_name)
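Note that `entry_points().get(...)` only works with the older dict-style return value; a hedged sketch of the same group lookup with the selectable API available on Python 3.10+ (and recent importlib_metadata backports):

from importlib.metadata import entry_points

# Select the group directly instead of calling .get() on the result.
gui_entry_points = entry_points(group="maestral_gui")
for ep in gui_entry_points:
    print(ep.name, ep.value)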
Example No. 13
def test_requires():
    assert im.requires("foo-bar") == ["Werkzeug (>=0.15)", "Jinja2 (>=2.10.1)"]
Example No. 14
import os
import sys
import configparser
from datetime import datetime

from packaging.requirements import Requirement
from packaging.specifiers import SpecifierSet

try:
    import importlib.metadata as importlib_metadata
except ImportError:
    import importlib_metadata

# -- Check for missing dependencies -------------------------------------------
missing_requirements = {}
for line in importlib_metadata.requires('astropy'):
    if 'extra == "docs"' in line:
        req = Requirement(line.split(';')[0])
        req_package = req.name.lower()
        req_specifier = str(req.specifier)

        try:
            version = importlib_metadata.version(req_package)
        except importlib_metadata.PackageNotFoundError:
            # package is missing entirely; record it and skip the version check
            missing_requirements[req_package] = req_specifier
            continue

        if version not in SpecifierSet(req_specifier):
            missing_requirements[req_package] = req_specifier

if missing_requirements:
    print('The following packages could not be found and are required to '
Example No. 15
# be accessible, and the documentation will not build correctly.
# See sphinx_astropy.conf for which values are set there.

import os
import sys
import configparser
from datetime import datetime
from importlib import metadata

import doctest
from packaging.requirements import Requirement
from packaging.specifiers import SpecifierSet

# -- Check for missing dependencies -------------------------------------------
missing_requirements = {}
for line in metadata.requires('astropy'):
    if 'extra == "docs"' in line:
        req = Requirement(line.split(';')[0])
        req_package = req.name.lower()
        req_specifier = str(req.specifier)

        try:
            version = metadata.version(req_package)
        except metadata.PackageNotFoundError:
            # package is missing entirely; record it and skip the version check
            missing_requirements[req_package] = req_specifier
            continue

        if version not in SpecifierSet(req_specifier, prereleases=True):
            missing_requirements[req_package] = req_specifier

if missing_requirements:
    print('The following packages could not be found and are required to '
Example No. 16
from importlib import metadata

# print(metadata.version('pip'))

# metadados_pip = metadata.metadata('pip')
#
# print(list(metadados_pip))
#
# print(metadados_pip['Project-URL'])

# print(len(metadata.files('pip')))

print(metadata.requires('django'))
Example No. 17
# The new importlib.metadata module provides (provisional) support for reading metadata from
# third-party packages. For example, it can extract an installed package’s version number, list of
# entry points, and more:

# Note: the following example requires that the popular "requests"
# package has been installed.
from importlib.metadata import version, requires, files

print(version('requests'))
# '2.22.0'

print(list(requires('requests')))
# ['chardet (<3.1.0,>=3.0.2)']

print(list(files('requests'))[:5])
# [PackagePath('requests-2.22.0.dist-info/INSTALLER'),
#  PackagePath('requests-2.22.0.dist-info/LICENSE'),
#  PackagePath('requests-2.22.0.dist-info/METADATA'),
#  PackagePath('requests-2.22.0.dist-info/RECORD'),
#  PackagePath('requests-2.22.0.dist-info/WHEEL')]
Example No. 18
from importlib import metadata

print(metadata.version('pip'))
print(metadata.version('textblob'))

metadados_pip = metadata.metadata('pip')

print(list(metadados_pip))

print(metadados_pip['Project-URL'])

print(len(metadata.files('pip')))

print(metadata.requires('mypy'))
Example No. 19
def _get_extension_packages():
    if not _mfext_supported:
        _ext_debug("Not supported for your Python version -- 3.4+ is needed")
        return [], {}

    # If we have an INFO file with the appropriate information (if running from a saved
    # code package for example), we use that directly
    # Pre-compute on _extension_points
    from metaflow import INFO_FILE

    try:
        with open(INFO_FILE, "r") as contents:
            all_pkg, ext_to_pkg = json.load(contents).get(
                "ext_info", (None, None))
            if all_pkg is not None and ext_to_pkg is not None:
                _ext_debug("Loading pre-computed information from INFO file")
                # We need to properly convert stuff in ext_to_pkg
                for k, v in ext_to_pkg.items():
                    v = [MFExtPackage(*d) for d in v]
                    ext_to_pkg[k] = v
                return all_pkg, ext_to_pkg
    except IOError:
        pass

    # Check if we even have extensions
    try:
        extensions_module = importlib.import_module(EXT_PKG)
    except ImportError as e:
        if _py_ver >= 36:
            # e.name is set to the name of the package that fails to load
            # so don't error ONLY IF the error is importing this module (but do
            # error if there is a transitive import error)
            if not (isinstance(e, ModuleNotFoundError) and e.name == EXT_PKG):
                raise
            return {}, {}

    # At this point, we look at all the paths and create a set. As we find distributions
    # that match it, we will remove from the set and then will be left with any
    # PYTHONPATH "packages"
    all_paths = set(extensions_module.__path__)
    _ext_debug("Found packages present at %s" % str(all_paths))

    list_ext_points = [x.split(".") for x in _extension_points]
    init_ext_points = [x[0] for x in list_ext_points]

    # TODO: This relies only on requirements to determine import order; we may want
    # other ways of specifying "load me after this if it exists" without depending on
    # the package. One way would be to rely on the description and have that info there.
    # Not sure of the use though so maybe we can skip for now.
    mf_ext_packages = []
    # Key: distribution name/full path to package
    # Value:
    #  Key: TL package name
    #  Value: MFExtPackage
    extension_points_to_pkg = defaultdict(dict)
    config_to_pkg = defaultdict(list)
    for dist in metadata.distributions():
        if any([
                pkg == EXT_PKG
                for pkg in (dist.read_text("top_level.txt") or "").split()
        ]):
            _ext_debug("Found extension package '%s'..." %
                       dist.metadata["Name"])

            # Remove the path from the paths to search. This is not 100% accurate because
            # it is possible that at that same location there is a package and a non
            # package but it is exceedingly unlikely so we are going to ignore this.
            all_paths.discard(dist.locate_file(EXT_PKG).as_posix())

            mf_ext_packages.append(dist.metadata["Name"])

            # At this point, we check to see what extension points this package
            # contributes to. This is to enable multiple namespace packages to contribute
            # to the same extension point (for example, you may have multiple packages
            # that have plugins)
            for f in dist.files:
                # Make sure EXT_PKG is a ns package
                if f.as_posix() == "%s/__init__.py" % EXT_PKG:
                    raise RuntimeError(
                        "Package '%s' providing '%s' is not an implicit namespace "
                        "package as required" %
                        (dist.metadata["Name"], EXT_PKG))

                parts = list(f.parts)
                if (len(parts) > 1 and parts[0] == EXT_PKG
                        and parts[1] in init_ext_points):
                    # This is most likely a problem as we need an intermediate "identifier"
                    raise RuntimeError(
                        "Package '%s' should conform to %s.X.%s and not %s.%s where "
                        "X is your organization's name for example" %
                        (dist.metadata["Name"], EXT_PKG, parts[1], EXT_PKG,
                         parts[1]))

                if len(parts) > 3 and parts[0] == EXT_PKG:
                    # We go over _extension_points *in order* to make sure we get more
                    # specific paths first

                    # To give useful errors in case multiple TL packages in one package
                    dist_full_name = "%s[%s]" % (dist.metadata["Name"],
                                                 parts[1])
                    for idx, ext_list in enumerate(list_ext_points):
                        if (len(parts) > len(ext_list) + 2
                                and parts[2:2 + len(ext_list)] == ext_list):
                            # Check if this is an "init" file
                            config_module = None

                            if (len(parts) == len(ext_list) + 3
                                    and EXT_CONFIG_REGEXP.match(
                                        parts[-1]) is not None):
                                parts[-1] = parts[-1][:-3]  # Remove the .py
                                config_module = ".".join(parts)

                                config_to_pkg[config_module].append(
                                    dist_full_name)
                            cur_pkg = (extension_points_to_pkg[
                                _extension_points[idx]].setdefault(
                                    dist.metadata["Name"], {}).get(parts[1]))
                            if cur_pkg is not None:
                                if (config_module is not None
                                        and cur_pkg.config_module is not None):
                                    raise RuntimeError(
                                        "Package '%s' defines more than one "
                                        "configuration file for '%s': '%s' and '%s'"
                                        % (
                                            dist_full_name,
                                            _extension_points[idx],
                                            config_module,
                                            cur_pkg.config_module,
                                        ))
                                if config_module is not None:
                                    _ext_debug(
                                        "\tTL %s found config file '%s'" %
                                        (parts[1], config_module))
                                    extension_points_to_pkg[_extension_points[
                                        idx]][dist.metadata["Name"]][
                                            parts[1]] = MFExtPackage(
                                                package_name=dist_full_name,
                                                tl_package=parts[1],
                                                config_module=config_module,
                                            )
                            else:
                                _ext_debug(
                                    "\tTL %s extends '%s' with config '%s'" %
                                    (parts[1], _extension_points[idx],
                                     config_module))
                                extension_points_to_pkg[_extension_points[
                                    idx]][dist.metadata["Name"]][
                                        parts[1]] = MFExtPackage(
                                            package_name=dist_full_name,
                                            tl_package=parts[1],
                                            config_module=config_module,
                                        )
                            break

    # At this point, we have all the packages that contribute to EXT_PKG,
    # we now check to see if there is an order to respect based on dependencies. We will
    # return an ordered list that respects that order and is ordered alphabetically in
    # case of ties. We do not do any checks because we rely on pip to have done those.
    pkg_to_reqs_count = {}
    req_to_dep = {}
    mf_ext_packages_set = set(mf_ext_packages)
    for pkg_name in mf_ext_packages:
        req_count = 0
        req_pkgs = [x.split()[0] for x in metadata.requires(pkg_name) or []]
        for req_pkg in req_pkgs:
            if req_pkg in mf_ext_packages_set:
                req_count += 1
                req_to_dep.setdefault(req_pkg, []).append(pkg_name)
        pkg_to_reqs_count[pkg_name] = req_count

    # Find roots
    mf_pkg_list = []
    to_process = []
    for pkg_name, count in pkg_to_reqs_count.items():
        if count == 0:
            to_process.append(pkg_name)

    # Add them in alphabetical order
    to_process.sort()
    mf_pkg_list.extend(to_process)
    # Find rest topologically
    while to_process:
        next_round = []
        for pkg_name in to_process:
            del pkg_to_reqs_count[pkg_name]
            for dep in req_to_dep.get(pkg_name, []):
                cur_req_count = pkg_to_reqs_count[dep]
                if cur_req_count == 1:
                    next_round.append(dep)
                else:
                    pkg_to_reqs_count[dep] = cur_req_count - 1
        # Add those in alphabetical order
        next_round.sort()
        mf_pkg_list.extend(next_round)
        to_process = next_round

    # Check that we got them all
    if len(pkg_to_reqs_count) > 0:
        raise RuntimeError("Unresolved dependencies in %s: %s" %
                           (EXT_PKG, str(pkg_to_reqs_count)))

    # We check if we have any additional packages that were not yet installed that
    # we need to use. We always put them *last*.
    if len(all_paths) > 0:
        _ext_debug("Non installed packages present at %s" % str(all_paths))
        packages_to_add = set()
        for package_path in all_paths:
            _ext_debug("Walking path %s" % package_path)
            base_depth = len(package_path.split("/"))
            for root, dirs, files in os.walk(package_path):
                parts = root.split("/")
                cur_depth = len(parts)
                if cur_depth == base_depth:
                    if "__init__.py" in files:
                        raise RuntimeError(
                            "%s at '%s' is not an implicit namespace package as required"
                            % (EXT_PKG, root))
                    for d in dirs:
                        if d in init_ext_points:
                            raise RuntimeError(
                                "Package at %s should conform to %s.X.%s and not %s.%s "
                                "where X is your organization's name for example"
                                % (root, EXT_PKG, d, EXT_PKG, d))
                elif cur_depth > base_depth + 1:
                    # We want at least a TL name and something under
                    tl_name = parts[base_depth]
                    tl_fullname = "/".join([package_path, tl_name])
                    prefix_match = parts[base_depth + 1:]
                    next_dirs = None
                    for idx, ext_list in enumerate(list_ext_points):
                        if prefix_match == ext_list:
                            # Check for any "init" files
                            init_files = [
                                x for x in map(EXT_CONFIG_REGEXP.match, files)
                                if x is not None
                            ]
                            config_module = None
                            if len(init_files) > 1:
                                raise RuntimeError(
                                    "Package at %s defines more than one configuration "
                                    "file for '%s': %s" % (
                                        tl_fullname,
                                        ".".join(prefix_match),
                                        ", and ".join([
                                            "'%s'" % x.group(0)
                                            for x in init_files
                                        ]),
                                    ))
                            elif len(init_files) == 1:
                                config_module = ".".join(
                                    parts[base_depth - 1:] +
                                    [init_files[0].group(0)[:-3]])
                                config_to_pkg[config_module].append(
                                    tl_fullname)
                            d = extension_points_to_pkg[
                                _extension_points[idx]][tl_fullname] = dict()
                            d[tl_name] = MFExtPackage(
                                package_name=tl_fullname,
                                tl_package=tl_name,
                                config_module=config_module,
                            )
                            _ext_debug("\tExtends '%s' with config '%s'" %
                                       (_extension_points[idx], config_module))
                            packages_to_add.add(tl_fullname)
                        else:
                            # Check what directories we need to go down if any
                            if len(ext_list
                                   ) > 1 and prefix_match == ext_list[:-1]:
                                if next_dirs is None:
                                    next_dirs = []
                                next_dirs.append(ext_list[-1])
                    if next_dirs is not None:
                        dirs[:] = next_dirs[:]

        # Add all these new packages to the list of packages as well.
        packages_to_add = list(packages_to_add)
        packages_to_add.sort()
        mf_pkg_list.extend(packages_to_add)

    # Sanity check that we only have one package per configuration file
    errors = []
    for m, packages in config_to_pkg.items():
        if len(packages) > 1:
            errors.append(
                "\tPackages %s define the same configuration module '%s'" %
                (", and ".join(packages), m))
    if errors:
        raise RuntimeError("Conflicts in %s configuration files:\n%s" %
                           (EXT_PKG, "\n".join(errors)))

    extension_points_to_pkg.default_factory = None
    # Figure out the per extension point order
    for k, v in extension_points_to_pkg.items():
        l = [v[pkg].values() for pkg in mf_pkg_list if pkg in v]
        extension_points_to_pkg[k] = list(chain(*l))
    return mf_pkg_list, extension_points_to_pkg
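The dependency-ordering step near the end of the function above is essentially Kahn's topological sort with alphabetical tie-breaking; a minimal standalone sketch with an illustrative dependency graph:

def topo_order(packages, requires_map):
    """Order `packages` so each package comes after the packages it requires.

    `requires_map` maps a package to the subset of `packages` it depends on.
    Ties are broken alphabetically, mirroring the logic above.
    """
    remaining = {p: len(requires_map.get(p, ())) for p in packages}
    dependents = {}
    for p in packages:
        for req in requires_map.get(p, ()):
            dependents.setdefault(req, []).append(p)

    ordered = []
    ready = sorted(p for p, n in remaining.items() if n == 0)
    while ready:
        ordered.extend(ready)
        next_ready = []
        for p in ready:
            del remaining[p]
            for dep in dependents.get(p, []):
                remaining[dep] -= 1
                if remaining[dep] == 0:
                    next_ready.append(dep)
        ready = sorted(next_ready)
    if remaining:
        raise RuntimeError("Unresolved dependencies: %s" % remaining)
    return ordered


# Illustrative graph: b and c require a, d requires b and c.
print(topo_order(
    ["d", "b", "a", "c"],
    {"b": ["a"], "c": ["a"], "d": ["b", "c"]},
))  # ['a', 'b', 'c', 'd']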
Example No. 20

"""
Objective:
    ...
"""

from importlib import metadata

def fonte():
    """

    """

# print([1], metadata.metadata("pip"))  # show the metadata version and a report
print([2], metadados := list(metadata.metadata("pip")))
print([3], len(metadata.files("pip")))              # 753
print([4], metadata.requires('pip'))                # pip install pip
print([5], metadata.requires('django'))             # pip install django
print([6], metadata.requires('django-bootstrap4'))  # pip install django-bootstrap4

# for x in metadados:
#     print('\033[1:32m' + 'metadata.metadata("pip")["' + f'{x}' + '"]' + '\033[m', metadata.metadata("pip")[f"{x}"])
#
# print(f'{tuple(enumerate([metadata.metadata("pip")[f"{x}"] for x in metadados]))}')
Example No. 21
SPECIFIER_SET_REGEX = rf"(?:{SPECIFIER_REGEX})(?:[\s,]*{SPECIFIER_REGEX})*"

# Import gmpy2 to improve efficiency (for larger integers), if available.
gmpy2_version: Optional[Union[packaging.version.Version,
                              packaging.version.LegacyVersion]] = None
try:
    gmpy2_version = packaging.version.parse(version("gmpy2"))
except PackageNotFoundError:
    warnings.warn(
        "GMPY2 is not installed, however a significant performance improvement can be "
        "achieved by installing the GMPY2 library: "
        "'python -m pip install 'tno.mpc.encryption_schemes.utils[gmpy]'", )

USE_GMPY2 = False
if gmpy2_version is not None:
    DEPS = ";".join(requires(".".join(
        __name__.split(".")[:-1])))  # type: ignore[arg-type]
    gmpy2_spec_pattern = re.compile(
        f"gmpy2[^=~!<>]*?(?P<specs>({SPECIFIER_SET_REGEX}))")
    gmpy2_spec_match = gmpy2_spec_pattern.search(DEPS)
    if gmpy2_spec_match is None:
        raise ValueError(
            "Failed to extract optional gmpy2 version specifiers.")
    gmpy2_spec = SpecifierSet(gmpy2_spec_match.group("specs"))
    if gmpy2_version in gmpy2_spec:
        USE_GMPY2 = True
    else:
        warnings.warn(
            f"Efficiency gain is supported for gmpy2{gmpy2_spec}. Detected gmpy2 version "
            f"{gmpy2_version}. Fallback to non-gmpy2 support.")
Example No. 22
from importlib import metadata
print(metadata.version("pip"))
# print(metadata.metadata('pip'))
print(list(metadata.metadata('pip')))
print(metadata.metadata('pip')['Home-page'])
print(len(metadata.files('pip')))
# print([p for p in metadata.files('pip') if p.suffix == '.py'])
init_path = [p for p in metadata.files('pip') if p.suffix == '.py'][0]
# print(init_path.read_text())
print(metadata.requires('requests'))
Example No. 23
 def test_requires_dist_info(self):
     deps = list(requires('distinfo-pkg'))
     assert deps and all(deps)
     assert 'wheel >= 1.0' in deps
     assert "pytest; extra == 'test'" in deps
Example No. 24
 def test_requires_egg_info(self):
     deps = requires('egginfo-pkg')
     assert len(deps) == 2
     assert any(dep == 'wheel >= 1.0; python_version >= "2.7"'
                for dep in deps)
 def test_requires_dist_info(self):
     deps = list(requires('distinfo-pkg'))
     assert deps and all(deps)
Example No. 26
 def test_requires_egg_info_file(self):
     requirements = requires('egginfo-file')
     self.assertIsNone(requirements)
Example No. 27
"""
Metadata
"""

from importlib import metadata

print(metadata.version('pip'))

metadados_pip = metadata.metadata('pip')

print(list(metadados_pip))

print(metadados_pip['Project-URL'])

print(len(metadata.files('pip')))

print(metadata.requires('pip'))
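Note that `requires()` returns None when a distribution declares no dependencies (pip, for instance, vendors its dependencies), so guard before iterating:

from importlib import metadata

deps = metadata.requires('pip') or []  # requires() yields None when nothing is declared
print(len(deps))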
Example No. 28
 def test_requires_dist_info(self):
     deps = requires('distinfo-pkg')
     assert len(deps) == 2
     assert all(deps)
     assert 'wheel >= 1.0' in deps
     assert "pytest; extra == 'test'" in deps
Example No. 29
# Example 01
from importlib import metadata
metadata.version("pip")

pip_metadata = metadata.metadata("pip")
list(pip_metadata)

pip_metadata["Home-page"]

pip_metadata["Requires-Python"]

len(metadata.files("pip"))

# Example 02
[p for p in metadata.files("realpython-reader") if p.suffix == ".py"]

init_path = _[0]  # the underscore (_) holds the last value returned in the REPL
print(init_path.read_text())

# Example 03
metadata.requires("realpython-reader")