Example #1
    def _harvest_plugins_in_eggs(self, application):
        """ Harvest plugins found in eggs on the plugin path. """

        # We first add the eggs to a local working set so that when we get
        # the plugin entry points we don't pick up any from other eggs
        # installed on sys.path.
        plugin_working_set = pkg_resources.WorkingSet(self.plugin_path)
        add_eggs_on_path(plugin_working_set, self.plugin_path,
                         self._handle_broken_distributions)

        # We also add the eggs to the global working set as otherwise the
        # plugin classes can't be imported!
        add_eggs_on_path(pkg_resources.working_set, self.plugin_path,
                         self._handle_broken_distributions)

        plugins = []
        for entry_point in self._get_plugin_entry_points(plugin_working_set):
            if self._include_plugin(entry_point.name):
                try:
                    plugin = self._create_plugin_from_entry_point(entry_point,
                                                                  application)
                    plugins.append(plugin)
                except Exception as exc:
                    exc_tb = traceback.format_exc()
                    msg = 'Error loading plugin: %s (from %s)\n%s' % (
                        entry_point.name, entry_point.dist.location, exc_tb)
                    logger.error(msg)
                    self.on_broken_plugin(entry_point, exc)

        return plugins
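The comments above explain why two working sets are used: a private one for the entry-point scan and the global one so the plugin classes stay importable. A minimal, self-contained sketch of the same pattern using only pkg_resources follows; the directory argument and the 'acme.plugins' group name are made-up placeholders, and the Envisage helper add_eggs_on_path is replaced by Environment plus find_plugins.

import pkg_resources

def iter_plugin_entry_points(plugin_dir, group='acme.plugins'):
    # Private working set, so entry points from other distributions on
    # sys.path are not picked up during the scan.
    local_ws = pkg_resources.WorkingSet([plugin_dir])
    env = pkg_resources.Environment([plugin_dir])
    distributions, errors = local_ws.find_plugins(env)
    for dist in distributions:
        local_ws.add(dist)                   # visible to the local scan
        pkg_resources.working_set.add(dist)  # importable globally
    return local_ws.iter_entry_points(group)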
Example #2
def _protobuf3():
    ws = pkg_resources.WorkingSet()
    try:
        ws.require('protobuf>=3.0.0a')
        return True
    except pkg_resources.VersionConflict:
        return False
Example #3
def with_requires(*requirements):
    """Run a test case only when given requirements are satisfied.

    .. admonition:: Example

       This test case runs only when `numpy>=1.10` is installed.

       >>> from ideep import testing
       ... class Test(unittest.TestCase):
       ...     @testing.with_requires('numpy>=1.10')
       ...     def test_for_numpy_1_10(self):
       ...         pass

    Args:
        requirements: A list of string representing requirement condition to
            run a given test case.

    """
    ws = pkg_resources.WorkingSet()
    try:
        ws.require(*requirements)
        skip = False
    except pkg_resources.VersionConflict:
        skip = True

    msg = 'requires: {}'.format(','.join(requirements))
    return unittest.skipIf(skip, msg)
Example #4
def initialize_pkg_resources():
    import pkg_resources

    def distribution_finder(finder, entry, only):
        for name in finder.listdir(""):
            if is_dist_info(name):
                yield pkg_resources.Distribution.from_location(entry, name)

    pkg_resources.register_finder(AssetFinder, distribution_finder)
    pkg_resources.working_set = pkg_resources.WorkingSet()

    class AssetProvider(pkg_resources.NullProvider):
        def __init__(self, module):
            super().__init__(module)
            self.finder = self.loader.finder

        def _has(self, path):
            return self.finder.exists(self.finder.zip_path(path))

        def _isdir(self, path):
            return self.finder.isdir(self.finder.zip_path(path))

        def _listdir(self, path):
            return self.finder.listdir(self.finder.zip_path(path))

    pkg_resources.register_loader_type(AssetLoader, AssetProvider)
Example #5
    def _directory_import(self):
        """
        Import astropy_helpers from the given path, which will be added to
        sys.path.

        Returns the distribution if the import succeeded, and None otherwise.
        """

        # Return the distribution on success, or None if no distribution
        # could be found or generated in the given path
        path = os.path.abspath(self.path)

        # Use an empty WorkingSet rather than the main
        # pkg_resources.working_set, since on older versions of setuptools
        # this will raise a VersionConflict when trying to install an upgrade
        ws = pkg_resources.WorkingSet([])
        ws.add_entry(path)
        dist = ws.by_key.get(DIST_NAME)

        if dist is None:
            # We didn't find an egg-info/dist-info in the given path, but if a
            # setup.py exists we can generate it
            setup_py = os.path.join(path, "setup.py")
            if os.path.isfile(setup_py):
                # We use subprocess instead of run_setup from setuptools to
                # avoid segmentation faults - see the following for more details:
                # https://github.com/cython/cython/issues/2104
                sp.check_output([sys.executable, "setup.py", "egg_info"],
                                cwd=path)

                for dist in pkg_resources.find_distributions(path, True):
                    # There should be only one...
                    return dist

        return dist
Example #6
def install_missing_requirements(module):
    """
    Some of the modules require external packages to be installed. This gets
    the list from the `REQUIREMENTS` module attribute and attempts to
    install the requirements using pip.
    :param module: GPIO module
    :type module: ModuleType
    :return: None
    :rtype: NoneType
    """
    reqs = getattr(module, "REQUIREMENTS", [])
    if not reqs:
        _LOG.info("Module %r has no extra requirements to install." % module)
        return
    import pkg_resources
    pkgs_installed = pkg_resources.WorkingSet()
    pkgs_required = []
    for req in reqs:
        if pkgs_installed.find(pkg_resources.Requirement.parse(req)) is None:
            pkgs_required.append(req)
    if pkgs_required:
        from pip.commands.install import InstallCommand
        from pip.status_codes import SUCCESS
        cmd = InstallCommand()
        result = cmd.main(pkgs_required)
        if result != SUCCESS:
            raise CannotInstallModuleRequirements(
                "Unable to install packages for module %r (%s)..." %
                (module, pkgs_required))
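For context, a hedged sketch of the contract this helper relies on: the GPIO module declares its extra distributions in a REQUIREMENTS list, and each entry is parsed with Requirement.parse and installed only if WorkingSet.find cannot locate it. The module and requirement names below are hypothetical.

# my_gpio_module.py (hypothetical)
REQUIREMENTS = ["RPi.GPIO>=0.7"]

def setup_module():
    # Safe to import only after install_missing_requirements() has run.
    import RPi.GPIO as GPIO
    GPIO.setmode(GPIO.BCM)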
Example #7
def bootstrap(self, args):

    self._setup_directories()

    # Now copy buildout and setuptools eggs, and record destination eggs:
    entries = []
    for name in 'setuptools', 'zc.buildout':
        r = pkg_resources.Requirement.parse(name)
        dist = pkg_resources.working_set.find(r)
        if dist.precedence == pkg_resources.DEVELOP_DIST:
            dest = os.path.join(self['buildout']['develop-eggs-directory'],
                                name+'.egg-link')
            open(dest, 'w').write(dist.location)
            entries.append(dist.location)
        else:
            dest = os.path.join(self['buildout']['eggs-directory'],
                                os.path.basename(dist.location))
            entries.append(dest)
            if not os.path.exists(dest):
                if os.path.isdir(dist.location):
                    shutil.copytree(dist.location, dest)
                else:
                    shutil.copy2(dist.location, dest)

    # Create buildout script
    ws = pkg_resources.WorkingSet(entries)
    ws.require('zc.buildout')
    zc.buildout.easy_install.scripts(
        ['zc.buildout'], ws, sys.executable,
            self['buildout']['bin-directory'])
Example #8
def skipif_requires_satisfied(*requirements):
    ws = pkg_resources.WorkingSet()
    try:
        ws.require(*requirements)
    except pkg_resources.ResolutionError:
        return False
    return skipif(True)
Example #9
 def get_version_information(self, version, mod_info):
     """ get version information """
     mod_list = pkg_resources.WorkingSet()
     items = list(filter(lambda x: x.project_name.startswith(version.name), mod_list))
     if items:
         version.current_version = items[0].version
     return version
Example #10
 def __init__(self, default=None, cache=50, container=None, **kw):
     """Engines creates a Resolver instance and configures Distribute
     entry point iteration.
     
     The container argument allows you to supply an on-disk path
     which can contain .egg packages to search for engines.
     
     You may pass additional keyword arguments to pre-configure
     engine options.  Engine options will either be merged with
     the options passed at runtime or passed during engine
     initialization depending on if the engine is a simple callable
     or class, respectively.
     """
     
     super(Engines, self).__init__()
     
     self.resolve = Resolver(default, cache)
     self.options = collections.defaultdict(dict)
     self.options.update(kw)
     
     collection = pkg_resources.WorkingSet()
     
     if container:
         collection.add_entry(container)
     
     collection.subscribe(self._distributions)
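The final subscribe call is what makes the container argument work: pkg_resources invokes the callback for every distribution already in the working set and again for distributions contributed by entries added later. A minimal sketch of that mechanism, with a hypothetical directory path:

import pkg_resources

def on_distribution(dist):
    # Called once per distribution already in the set, and again for any
    # distribution found on entries added afterwards.
    print('discovered', dist.project_name, dist.version)

ws = pkg_resources.WorkingSet()
ws.subscribe(on_distribution)
ws.add_entry('./engines')  # hypothetical path; new finds trigger the callback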
Example #11
def working_set(buildout):
    """Creates and returns a new working set based on user prefixes and existing
  packages already installed"""

    working_set = pkg_resources.WorkingSet([])

    # add development directory first
    dev_dir = buildout['develop-eggs-directory']
    for path in fnmatch.filter(os.listdir(dev_dir), '*.egg-link'):
        full_path = os.path.join(dev_dir, path)
        python_path = open(full_path, 'rt').read().split('\n')[0]
        distro = None
        wants = os.path.splitext(path)[0]
        distro = [k for k in pkg_resources.find_distributions(python_path) \
            if k.project_name == wants]
        if not distro:
            raise RuntimeError("Could not find a distribution for `%s' under `%s'" \
                " - check egg-link at `%s'" % (wants, python_path, full_path))
        working_set.add(distro[0])

    # add all egg directories, newest first
    for path in order_egg_dirs(buildout):
        working_set.add_entry(path)

    # adds the user paths
    for path in find_site_packages(get_prefixes(buildout)):
        if has_distribution(path) and path not in working_set.entries:
            working_set.add_entry(path)

    # finally, adds the system path
    for path in site.sys.path:
        if has_distribution(path) and path not in working_set.entries:
            working_set.add_entry(path)

    return working_set
Example #12
def reload_module(module_name):
    if module_name not in module_ep_map:
        raise ValueError("{} is not a plugin module".format(module_name))
    module = sys.modules.get(module_name)
    dist = module_dist_map[module_name]
    old_plugins = set()
    if module:
        old_plugins = _get_plugins(module)
        # remove module attrs just in case plugin classes were removed
        for attr in dir(module):
            if attr not in ('__name__', '__file__'):
                delattr(module, attr)
        try:
            module = importlib.reload(module)
        except ImportError as e:
            print("ERROR: ", e)
            return
        except SyntaxError:
            print("SYNTAX ERROR in {} ({})".format(module_name, dist))
            return
    new_plugins = _get_plugins(module)
    removed_plugins = old_plugins - new_plugins
    added_plugins = new_plugins - old_plugins
    _update_ep_file(dist, module_name, added_plugins, removed_plugins)
    if removed_plugins or added_plugins:
        global ws
        ws = pkg_resources.WorkingSet()
Example #13
def __get_extra_extension_classes(paths):
    """
    Banana banana
    """
    extra_classes = []
    wset = pkg_resources.WorkingSet([])
    distributions, _ = wset.find_plugins(pkg_resources.Environment(paths))

    for dist in distributions:
        sys.path.append(dist.location)
        wset.add(dist)

    for entry_point in wset.iter_entry_points(group='hotdoc.extensions',
                                              name='get_extension_classes'):
        try:
            activation_function = entry_point.load()
            classes = activation_function()
        # pylint: disable=broad-except
        except Exception as exc:
            print("Failed to load %s %s" % (entry_point.module_name, exc))
            traceback.print_exc()
            continue

        for klass in classes:
            extra_classes.append(klass)

    return extra_classes
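For reference, a package would typically make itself discoverable by the loader above by exposing a get_extension_classes entry point in the 'hotdoc.extensions' group. A hedged setup() sketch, with hypothetical package and module names:

from setuptools import setup

setup(
    name='hotdoc-my-extension',   # hypothetical distribution name
    version='0.1',
    py_modules=['my_extension'],  # module providing get_extension_classes()
    entry_points={
        'hotdoc.extensions': [
            'get_extension_classes = my_extension:get_extension_classes',
        ],
    },
)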
Example #14
    def setup_runtime(self):
        make_dummy_dist(self, (('bower.json',
                                json.dumps({
                                    'name': 'site',
                                    'dependencies': {
                                        'jquery': '~3.1.0',
                                    },
                                })), ), 'example.package1', '1.0')

        make_dummy_dist(self, (('bower.json',
                                json.dumps({
                                    'name': 'site',
                                    'dependencies': {
                                        'underscore': '~1.8.3',
                                    },
                                })), ), 'example.package2', '2.0')

        working_set = pkg_resources.WorkingSet([self._calmjs_testing_tmpdir])

        # Stub out the underlying data needed for the cli for the tests
        # to test against our custom data for reproducibility.
        stub_item_attr_value(self, dist, 'default_working_set', working_set)
        stub_mod_check_interactive(self, [cli], True)

        # Of course, apply a mock working set for the runtime instance
        # so it can use the bower runtime.
        working_set = mocks.WorkingSet({
            'calmjs.runtime': [
                'bower = calmjs.bower:bower.runtime',
            ],
        })
        return runtime.Runtime(working_set=working_set)
Example #15
def find_site_path(pkg, site_dir=None):
    import pkg_resources
    if site_dir is None:
        site_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    working_set = pkg_resources.WorkingSet([site_dir] + sys.path[:])
    for dist in working_set:
        root = dist.location
        base_name = dist.project_name if dist.project_name else dist.key
        name = None
        if "top_level.txt" in dist.metadata_listdir(""):
            name = next(
                iter([
                    l.strip() for l in dist.get_metadata_lines("top_level.txt")
                    if l is not None
                ]), None)
        if name is None:
            name = pkg_resources.safe_name(base_name).replace("-", "_")
        if not any(pkg == _ for _ in [base_name, name]):
            continue
        path_options = [name, "{0}.py".format(name)]
        path_options = [
            os.path.join(root, p) for p in path_options if p is not None
        ]
        path = next(iter(p for p in path_options if os.path.exists(p)), None)
        if path is not None:
            return (dist, path)
    return (None, None)
Example #16
 def test_missing_distribution(self):
     d_egg_root = join(mkdtemp(self), 'dummyns')
     make_dummy_dist(self, (
         (
             'namespace_packages.txt',
             'not_ns\n',
         ),
         (
             'entry_points.txt',
             '[dummyns]\n'
             'dummyns = dummyns:attr\n',
         ),
     ),
                     'dummyns',
                     '2.0',
                     working_dir=d_egg_root)
     working_set = pkg_resources.WorkingSet([
         d_egg_root,
         self.ds_egg_root,
     ])
     dummyns_ep = next(working_set.iter_entry_points('dummyns'))
     with pretty_logging(stream=StringIO()) as fd:
         p = indexer.resource_filename_mod_entry_point(
             'dummyns', dummyns_ep)
     # not stubbed working_set, so this is derived using fallback
     # value from the sys.modules['dummyns'] location
     self.assertEqual(normcase(p), normcase(self.dummyns_path))
     self.assertIn("distribution 'dummyns 2.0' not found", fd.getvalue())
Example #17
 def append_item(self, items_dict, key, dist=None):
     active = dist in pkg_resources.WorkingSet() if dist else None
     data = dict(key=key, active=active, dist=dist)
     label = self.get_label(data)
     style = self.get_style(data)
     items_dict[label] = data
     items_dict[label]['_item'] = style
Example #18
def _fetch():
    global _distributions

    if not _distributions:
        _distributions = {}
        for dist in pkg_resources.WorkingSet():
            _distributions[dist.key] = dist
Example #19
 def test_pip_install_sdist(self):
     working_set = pkg_resources.WorkingSet(sys.path)
     installer = pip_installer.PipInstaller(self.dst, working_set)
     installer.sdist = self.sdist
     installer.wheel = self.wheel
     dist = installer(self.req)
     self.assertTrue(dist in self.req)
Example #20
File: del_.py Project: jayvdb/zetup
def del_(zfg, args=None):
    """Delete project from python environment."""
    try:  # check for conda
        conda_info = conda.info()
    except OSError:  # ==> no conda
        pass
    else:
        # are we in a conda environment?
        if any(
                Path(conda_info[key]).samefile(sys.prefix)
                for key in ['root_prefix', 'default_prefix']
                # and is project installed via conda?
        ) and conda.list('--no-pip', '--full-name', zfg.NAME):
            # then also remove it via conda
            status = conda.remove(zfg.NAME, json=False)
            if status:  # ==> error
                return status
    # is there some project (develop) install (left) to be removed via pip?
    while True:
        try:
            # always use a refreshed working set
            # (interface to installed python package distributions)
            dist = pkg_resources.WorkingSet().by_key[zfg.NAME]
        except KeyError:  # ==> nothing left to uninstall
            break
        status = pip(['uninstall', zfg.NAME, '--yes'], raise_=False)
        if status:  # ==> error
            return status
        root = Path(dist.location)
        if root.exists() and root.samefile(zfg.ZETUP_DIR):
            # pip doesn't remove local .egg-info/ dirs of develop installs
            egg_info = Path(dist._provider.egg_info).realpath()
            print("zetup: Removing %s%s" % (egg_info, os.path.sep))
            egg_info.rmtree()
Example #21
    def __init__(self, working_set=None, plugin_names=None):
        self.working_set = working_set or pkg_resources.WorkingSet()

        if plugin_names is None:
            entry_points = self.available_plugins()
        else:
            entry_points = []
            for name in plugin_names:
                try:
                    entry_point = self.available_plugins(name).next()
                except StopIteration:
                    print >> sys.stderr, ("Unable to locate plugin "
                                          "%s. Skipping." % name)
                    continue
                else:
                    entry_points.append(entry_point)

        self.plugins = OrderedDict()
        for entry_point in entry_points:
            try:
                plugin_cls = entry_point.load()
            except Exception as e:
                if plugin_names:
                    # if this plugin was specifically requested, fail.
                    raise e
                else:
                    print >> sys.stderr, ("Error loading plugin %s (%s)."
                                          " Skipping." % (entry_point.name, e))
                    continue
            self.plugins[entry_point.name] = plugin_cls(entry_point)
Example #22
def install_missing_requirements(module):
    """
    Some of the modules require external packages to be installed. This gets
    the list from the `REQUIREMENTS` module attribute and attempts to
    install the requirements using pip.
    :param module: GPIO module
    :type module: ModuleType
    :return: None
    :rtype: NoneType
    """
    reqs = getattr(module, "REQUIREMENTS", [])
    if not reqs:
        _LOG.info("Module %r has no extra requirements to install." % module)
        return
    import pkg_resources
    pkgs_installed = pkg_resources.WorkingSet()
    pkgs_required = []
    for req in reqs:
        if pkgs_installed.find(pkg_resources.Requirement.parse(req)) is None:
            pkgs_required.append(req)
    if pkgs_required:
        from subprocess import check_call, CalledProcessError
        try:
            check_call(['/usr/bin/env', 'pip', 'install'] + pkgs_required)
        except CalledProcessError as err:
            raise CannotInstallModuleRequirements(
                "Unable to install packages for module %r (%s): %s" %
                (module, pkgs_required, err))
Example #23
    def finalize_options(self):
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command("bdist").bdist_base
            self.bdist_dir = os.path.join(bdist_base, "xar")
        if self.dist_dir is None:
            script_name = os.path.expanduser(self.distribution.script_name)
            package_dir = os.path.dirname(os.path.realpath(script_name))
            self.dist_dir = os.path.join(package_dir, "dist")
        if self.console_scripts is not None:
            self.console_scripts = self.console_scripts.strip().split(",")
        self.sqopts = xar_util.SquashfsOptions()
        if self.xar_compression_algorithm is not None:
            self.sqopts.compression_algorithm = self.xar_compression_algorithm
        else:
            self.sqopts.compression_algorithm = "gzip"
        if self.xar_block_size is not None:
            self.sqopts.block_size = self.xar_block_size
        if self.xar_zstd_level is not None:
            self.sqopts.zstd_level = self.xar_zstd_level
        self.xar_outputs = []

        self.working_set = pkg_resources.WorkingSet(sys.path)
        self.installer = None
        if self.download:
            bdist_pip = os.path.join(self.bdist_dir, "downloads")
            mkpath(bdist_pip)
            self.installer = pip_installer.PipInstaller(
                bdist_pip, self.working_set, log)
Example #24
    def setUp(self):
        # Get the location of the mock plugin module metadata
        mock_module_path = os.path.join(
            baseline_reader.path_to_baseline_directory(),
            'plugin_module',
        )
        self.mock_module_manifest_path = os.path.join(
            mock_module_path,
            "otio_jsonplugin",
            "plugin_manifest.json"
        )

        # Create a WorkingSet as if the module were installed
        entries = [mock_module_path] + pkg_resources.working_set.entries

        self.sys_patch = mock.patch('sys.path', entries)
        self.sys_patch.start()

        working_set = pkg_resources.WorkingSet(entries)

        # linker from the entry point
        self.entry_patcher = mock.patch(
            'pkg_resources.iter_entry_points',
            working_set.iter_entry_points
        )
        self.entry_patcher.start()
Example #25
 def run(self):
     doc = ViewList()
     req_string = pkg_resources.Requirement.parse(self.module)
     active = pkg_resources.WorkingSet().find(req_string)
     if not active:
         msg = 'includechangelog - package "{0}" not found.'.format(
             self.module)
         return [self.state.document.reporter.error(msg)]
     packageInfos = active._get_metadata('PKG-INFO')
     addLine = False
     doc.append(u'', '<includedoc>')
     for line in packageInfos:
         if 'Platform: ' in line:
             break
         if addLine or line in ['Changes', 'Changelog']:
             addLine = True
             doc.append(line.decode('utf-8'), '<includedoc>')
     doc.append(u'', '<includedoc>')
     node = section()
     surrounding_title_styles = self.state.memo.title_styles
     surrounding_section_level = self.state.memo.section_level
     self.state.memo.title_styles = []
     self.state.memo.section_level = 0
     self.state.nested_parse(doc, 0, node, match_titles=1)
     self.state.memo.title_styles = surrounding_title_styles
     self.state.memo.section_level = surrounding_section_level
     return node.children
Example #26
def get_distributions(show, pkg_name='', version=''):
    """Yield installed packages.

    @param show: Type of package(s) to show; active, nonactive or all
    @type show: string: "active", "nonactive", "all"

    @param pkg_name: PyPI project name
    @type pkg_name: string

    @param version: project's PyPI version
    @type version: string

    @returns: yields tuples of distribution and True or False depending
              on active state. e.g. (dist, True)

    """
    environment = pkg_resources.Environment()
    working_set = pkg_resources.WorkingSet()
    # "name" is a placeholder for the sorted list.
    for name, dist in get_alpha(show, pkg_name, version):
        ver = dist.version
        for package in environment[dist.project_name]:
            if ver == package.version:
                if show == 'nonactive' and dist not in working_set:
                    yield (dist, query_activated(dist))
                elif show == 'active' and dist in working_set:
                    yield (dist, query_activated(dist))
                elif show == 'all':
                    yield (dist, query_activated(dist))
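A short usage sketch, assuming get_distributions is imported from the module above:

for dist, active in get_distributions('all'):
    print(dist.project_name, dist.version, 'active' if active else 'inactive')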
Example #27
def with_requires(*requirements):
    """Run a test case only when given requirements are satisfied.

    .. admonition:: Example

       This test case runs only when `numpy>=1.18` is installed.

       >>> from dpnp import testing
       ... class Test(unittest.TestCase):
       ...     @testing.with_requires('numpy>=1.18')
       ...     def test_for_numpy_1_18(self):
       ...         pass

    Args:
        requirements: A list of string representing requirement condition to
            run a given test case.

    """
    # Delay import of pkg_resources because it is excruciatingly slow.
    # See https://github.com/pypa/setuptools/issues/510
    import pkg_resources

    ws = pkg_resources.WorkingSet()
    try:
        ws.require(*requirements)
        skip = False
    except pkg_resources.ResolutionError:
        skip = True

    msg = 'requires: {}'.format(','.join(requirements))
    return unittest.skipIf(skip, msg)
Example #28
    def __init__(
        self,
        working_set=None,
        show=lambda name: True,
        follow=lambda name: True,
        extras=True,
    ):
        """Create a dependency graph.

        The graph is initially empty. To populate it with nodes and edges call
        either ``from_working_set`` or ``from_specifications``.

        You can specify filtering functions:

          show(name): Returns True if the package with the given name and its
                      dependencies ought to be included in the graph.

          follow(name): Return True if the package with the given name ought
                        to have its dependencies parsed and included in the
                        graph.

        You can also indicate whether you want to process information about
        extra dependencies: if extras is False, all information about extras
        will be discarded.

        """
        self.working_set = working_set or pkg_resources.WorkingSet()
        self.show = show
        self.follow = follow
        self.extras = extras
        self.roots = ()
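A hedged usage sketch; the class name DependencyGraph and the argument accepted by from_specifications are assumptions, since neither appears in the snippet above:

graph = DependencyGraph(
    show=lambda name: not name.startswith('zc.'),  # hide buildout internals
    follow=lambda name: True,                      # recurse into everything shown
    extras=False,                                  # discard extras information
)
graph.from_specifications('mypackage')             # hypothetical requirement spec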
Example #29
def _directory_import(path):
    """
    Import astropy_helpers from the given path, which will be added to
    sys.path.

    Returns the distribution if the import succeeded, and None otherwise.
    """

    # Return the distribution on success, or None if no distribution could be
    # found or generated in the given path
    path = os.path.abspath(path)

    # Use an empty WorkingSet rather than the main pkg_resources.working_set,
    # since on older versions of setuptools this will raise a VersionConflict
    # when trying to install an upgrade
    ws = pkg_resources.WorkingSet([])
    ws.add_entry(path)
    dist = ws.by_key.get(DIST_NAME)

    if dist is None:
        # We didn't find an egg-info/dist-info in the given path, but if a
        # setup.py exists we can generate it
        setup_py = os.path.join(path, 'setup.py')
        if os.path.isfile(setup_py):
            with _silence():
                run_setup(os.path.join(path, 'setup.py'), ['egg_info'])

            for dist in pkg_resources.find_distributions(path, True):
                # There should be only one...
                return dist

    return dist
Example #30
def initialize_pkg_resources():
    # Because so much code requires pkg_resources without declaring setuptools as a dependency,
    # we include it in the bootstrap ZIP. We don't include the rest of setuptools, because it's
    # much larger and much less likely to be useful. If the user installs setuptools via pip,
    # then that copy of pkg_resources will take priority because the requirements ZIP is
    # earlier on sys.path.
    import pkg_resources

    def distribution_finder(finder, entry, only):
        for name in finder.listdir(""):
            if name.endswith(".dist-info"):
                yield pkg_resources.Distribution.from_location(entry, name)

    pkg_resources.register_finder(AssetFinder, distribution_finder)
    pkg_resources.working_set = pkg_resources.WorkingSet()

    class AssetProvider(pkg_resources.NullProvider):
        def __init__(self, module):
            super().__init__(module)
            self.finder = self.loader.finder

        def _has(self, path):
            return self.finder.exists(self.finder.zip_path(path))

        def _isdir(self, path):
            return self.finder.isdir(self.finder.zip_path(path))

        def _listdir(self, path):
            return self.finder.listdir(self.finder.zip_path(path))

    pkg_resources.register_loader_type(AssetLoader, AssetProvider)