Example #1
def import_tentaclio_plugins(
    list_packages: PackageLister = packages_distributions().keys,
) -> None:
    """Find and import tentaclio plugins."""
    for package in list_packages():
        if package.startswith("tentaclio_"):
            logger.info(f"Importing plugin: {package}")
            importlib.import_module(package)
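Since `list_packages` is just a callable returning distribution names, the loader can be exercised without installing anything. The sketch below is hypothetical (the plugin name `tentaclio_gs` is made up) and assumes `import_tentaclio_plugins` is importable from its defining module:

from unittest import mock


def fake_lister():
    # Hypothetical package names; only the "tentaclio_" one should be imported.
    return ["tentaclio_gs", "requests", "pip"]


def test_import_tentaclio_plugins_only_imports_plugins():
    with mock.patch("importlib.import_module") as import_module:
        import_tentaclio_plugins(list_packages=fake_lister)
    import_module.assert_called_once_with("tentaclio_gs")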
Example #2
 def test_packages_distributions_example2(self):
     """
     Test packages_distributions on a wheel built
     by trampolim.
     """
     self._fixture_on_path('example2-1.0.0-py3-none-any.whl')
     assert packages_distributions()['example2'] == ['example2']
Example #3
 def test_packages_distributions_neither_toplevel_nor_files(self):
     """
     Test a package built without 'top-level.txt' or a file list.
     """
     fixtures.build_files(
         {
             'trim_example-1.0.0.dist-info': {
                 'METADATA':
                 """
             Name: trim_example
             Version: 1.0.0
             """,
             }
         },
         prefix=self.site_dir,
     )
     packages_distributions()
Example #4
def get_packages_distributions():
    packages = importlib_metadata.packages_distributions()
    packages = list(x for x in packages)
    packages = list(filter(lambda x: not x[:1].isdigit(), packages))
    packages = list(filter(lambda x: not x.startswith('_'), packages))
    packages = list(filter(lambda x: not any(e in x for e in r'\/'), packages))
    packages = sorted(packages, key=lambda x: x.lower())
    return packages
Example #5
def get_packages_distributions() -> TypeListStr:
    metadata = importlib_metadata.packages_distributions()
    packages = list(metadata)
    packages = list(filter(lambda x: not x[:1].isdigit(), packages))
    packages = list(filter(lambda x: not x.startswith('_'), packages))
    packages = list(filter(lambda x: all(e not in x for e in r'\/'), packages))
    packages = sorted(packages, key=lambda x: x.lower())
    return packages
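To make the filters above concrete, the sketch below runs a fake, made-up mapping (shaped like `packages_distributions()` output) through the same predicates; only a clean top-level name survives:

# Illustrative only: the keys below are invented for demonstration.
fake_metadata = {
    "requests": ["requests"],
    "_distutils_hack": ["setuptools"],  # dropped: leading underscore
    "0_bad_name": ["bad-dist"],         # dropped: leading digit
    "weird\\name": ["weird-dist"],      # dropped: contains a slash
}

packages = list(fake_metadata)
packages = [x for x in packages if not x[:1].isdigit()]
packages = [x for x in packages if not x.startswith("_")]
packages = [x for x in packages if all(e not in x for e in r"\/")]
packages = sorted(packages, key=str.lower)
print(packages)  # ['requests']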
Example #6
def test_infer_requirements_excludes_mlflow():
    with mock.patch(
        "mlflow.utils.requirements_utils._capture_imported_modules",
        return_value=["mlflow", "pytest"],
    ):
        mlflow_package = "mlflow-skinny" if "MLFLOW_SKINNY" in os.environ else "mlflow"
        assert mlflow_package in importlib_metadata.packages_distributions()["mlflow"]
        assert _infer_requirements("path/to/model", "sklearn") == [
            f"pytest=={pytest.__version__}"
        ]
Example #7
def _init_modules_to_packages_map():
    global _MODULES_TO_PACKAGES
    if _MODULES_TO_PACKAGES is None and _PACKAGES_TO_MODULES is None:
        # Note `importlib_metadata.packages_distributions` only captures packages installed into
        # Python’s site-packages directory via tools such as pip:
        # https://importlib-metadata.readthedocs.io/en/latest/using.html#using-importlib-metadata
        _MODULES_TO_PACKAGES = importlib_metadata.packages_distributions()

        # In Databricks, `_MODULES_TO_PACKAGES` doesn't contain pyspark since it's not installed
        # via pip or conda. To work around this issue, manually add pyspark.
        if is_in_databricks_runtime():
            _MODULES_TO_PACKAGES.update({"pyspark": ["pyspark"]})
Example #8
def _infer_requirements(model_uri, flavor):
    """
    Infers the pip requirements of the specified model by creating a subprocess and loading
    the model in it to determine which packages are imported.

    :param model_uri: The URI of the model.
    :param flavor: The flavor name of the model.
    :return: A list of inferred pip requirements.
    """
    global _MODULES_TO_PACKAGES
    if _MODULES_TO_PACKAGES is None:
        # Note `importlib_metadata.packages_distributions` only captures packages installed into
        # Python’s site-packages directory via tools such as pip:
        # https://importlib-metadata.readthedocs.io/en/latest/using.html#using-importlib-metadata
        _MODULES_TO_PACKAGES = importlib_metadata.packages_distributions()

        # In Databricks, `_MODULES_TO_PACKAGES` doesn't contain pyspark since it's not installed
        # via pip or conda. To work around this issue, manually add pyspark.
        if is_in_databricks_runtime():
            _MODULES_TO_PACKAGES.update({"pyspark": ["pyspark"]})

    global _PYPI_PACKAGE_INDEX
    if _PYPI_PACKAGE_INDEX is None:
        _PYPI_PACKAGE_INDEX = _load_pypi_package_index()

    modules = _capture_imported_modules(model_uri, flavor)
    packages = _flatten([_MODULES_TO_PACKAGES.get(module, []) for module in modules])
    packages = map(_normalize_package_name, packages)
    packages = _prune_packages(packages)
    excluded_packages = [
        # Certain packages (e.g. scikit-learn 0.24.2) import `setuptools` or `pkg_resources`
        # (a module provided by `setuptools`) to process or interact with package metadata.
        # It should be safe to exclude `setuptools` because it's rare to encounter a Python
        # environment where `setuptools` is not pre-installed.
        "setuptools",
        # Exclude a package that provides the mlflow module (e.g. mlflow, mlflow-skinny).
        # Certain flavors (e.g. pytorch) import mlflow while loading a model, but mlflow should
        # not be counted as a model requirement.
        *_MODULES_TO_PACKAGES.get("mlflow", []),
    ]
    packages = packages - set(excluded_packages)
    unrecognized_packages = packages - _PYPI_PACKAGE_INDEX.package_names
    if unrecognized_packages:
        _logger.warning(
            "The following packages were not found in the public PyPI package index as of"
            " %s; if these packages are not present in the public PyPI index, you must install"
            " them manually before loading your model: %s",
            _PYPI_PACKAGE_INDEX.date,
            unrecognized_packages,
        )
    return sorted(map(_get_pinned_requirement, packages))
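The heart of the flow above is the module-to-distribution lookup followed by excluding whichever distribution provides the `mlflow` module; a stripped-down sketch with an invented mapping:

from itertools import chain

# Stand-in for importlib_metadata.packages_distributions(); the keys and
# values here are made up for illustration.
modules_to_packages = {
    "sklearn": ["scikit-learn"],
    "numpy": ["numpy"],
    "mlflow": ["mlflow-skinny"],
}
captured_modules = ["sklearn", "numpy", "mlflow"]

packages = set(chain.from_iterable(modules_to_packages.get(m, []) for m in captured_modules))
packages -= set(modules_to_packages.get("mlflow", []))  # never pin mlflow itself
print(sorted(packages))  # ['numpy', 'scikit-learn']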
Example #9
def get_distributions():
    """
    Get a mapping of top-level packages to their distributions.
    """

    return packages_distributions()
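For reference, `packages_distributions()` maps top-level module names to the distributions that provide them; the exact contents depend on the environment, so the names in the comment below are only illustrative:

from importlib.metadata import packages_distributions  # importlib_metadata backport on Python < 3.10

mapping = packages_distributions()
# Typical shape (values vary by environment), e.g.:
#   {"yaml": ["PyYAML"], "pkg_resources": ["setuptools"], ...}
for module_name, dists in sorted(mapping.items())[:5]:
    print(module_name, "->", dists)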
Example #10
 def test_packages_distributions_example(self):
     self._fixture_on_path('example-21.12-py3-none-any.whl')
     assert packages_distributions()['example'] == ['example']
Example #11
            abs_path = os.path.join(os.path.dirname(requirements_file),
                                    req_file)
            yield from _parse_requirements(abs_path, is_constraint=True)
        else:
            yield _Requirement(line, is_constraint)


def _flatten(iterable):
    return chain.from_iterable(iterable)


def _canonicalize_package_name(pkg_name):
    return pkg_name.lower().replace("_", "-")


_MODULE_TO_PACKAGES = importlib_metadata.packages_distributions()


def _module_to_packages(module_name):
    """
    Returns a list of packages that provide the specified module.
    """
    return _MODULE_TO_PACKAGES.get(module_name, [])


def _get_requires_recursive(pkg_name):
    """
    Recursively yields both direct and transitive dependencies of the specified package.
    """
    if pkg_name not in pkg_resources.working_set.by_key:
        return
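As a rough usage sketch of the helpers above (results depend on what is installed; `yaml` resolving to `PyYAML` is only an example):

# Illustrative use of _module_to_packages and _canonicalize_package_name.
for module in ["yaml", "sklearn", "definitely_not_installed"]:
    dists = _module_to_packages(module)  # [] if nothing provides the module
    print(module, "->", [_canonicalize_package_name(d) for d in dists])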
Example #12
def get_distributions():
    return packages_distributions()