Example #1
 def __init__(
         self, index_url="https://pypi.python.org/simple", hosts=('*',),
         ca_bundle=None, verify_ssl=True, *args, **kw
         ):
     Environment.__init__(self,*args,**kw)
     self.index_url = index_url + "/"[:not index_url.endswith('/')]
     self.scanned_urls = {}
     self.fetched_urls = {}
     self.package_pages = {}
     self.allows = re.compile('|'.join(map(translate,hosts))).match
     self.to_scan = []
     if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()):
         self.opener = ssl_support.opener_for(ca_bundle)
Example #2
	def _init_environment(self):
		dist_folders = map(lambda d: os.path.join(os.getcwd(), self.mod_folder, d), os.listdir(self.mod_folder))
		dist_folders = filter(lambda f: os.path.exists(os.path.join(f, "EGG-INFO")), dist_folders)
		dists = map(lambda f: Distribution.from_filename(f) , dist_folders)
		#
		self.pkg_env = Environment()
		for dist in dists: self.pkg_env.add(dist)
Example #3
 def __init__(
         self, index_url="https://pypi.org/simple/", hosts=('*',),
         ca_bundle=None, verify_ssl=True, *args, **kw
 ):
     Environment.__init__(self, *args, **kw)
     self.index_url = index_url + "/"[:not index_url.endswith('/')]
     self.scanned_urls = {}
     self.fetched_urls = {}
     self.package_pages = {}
     self.allows = re.compile('|'.join(map(translate, hosts))).match
     self.to_scan = []
     use_ssl = (
         verify_ssl
         and ssl_support.is_available
         and (ca_bundle or ssl_support.find_ca_bundle())
     )
     if use_ssl:
         self.opener = ssl_support.opener_for(ca_bundle)
     else:
         self.opener = urllib.request.urlopen
Example #4
  def checker_pex(self, interpreter):
    # TODO(John Sirois): Formalize in pants.base?
    pants_dev_mode = os.environ.get('PANTS_DEV')

    if pants_dev_mode:
      checker_id = self.checker_target.transitive_invalidation_hash()
    else:
      checker_id = hash_all([self._CHECKER_REQ])

    pex_path = os.path.join(self.workdir, 'checker', checker_id, str(interpreter.identity))

    if not os.path.exists(pex_path):
      with self.context.new_workunit(name='build-checker'):
        with safe_concurrent_creation(pex_path) as chroot:
          pex_builder = PexBuilderWrapper.Factory.create(
            builder=PEXBuilder(path=chroot, interpreter=interpreter),
            log=self.context.log)

          # Constraining is required to guard against the case where the user
          # has a pexrc file set.
          pex_builder.add_interpreter_constraint(str(interpreter.identity.requirement))

          if pants_dev_mode:
            pex_builder.add_sources_from(self.checker_target)
            req_libs = [tgt for tgt in self.checker_target.closure()
                        if isinstance(tgt, PythonRequirementLibrary)]

            pex_builder.add_requirement_libs_from(req_libs=req_libs)
          else:
            try:
              # The checker is already on sys.path, eg: embedded in pants.pex.
              platform = Platform.current()
              platform_name = platform.platform
              env = Environment(search_path=sys.path,
                                platform=platform_name,
                                python=interpreter.version_string)
              working_set = WorkingSet(entries=sys.path)
              for dist in working_set.resolve([Requirement.parse(self._CHECKER_REQ)], env=env):
                pex_builder.add_direct_requirements(dist.requires())
                # NB: We add the dist location instead of the dist itself to make sure it's a
                # distribution style pex knows how to package.
                pex_builder.add_dist_location(dist.location)
              pex_builder.add_direct_requirements([self._CHECKER_REQ])
            except (DistributionNotFound, PEXBuilder.InvalidDistribution):
              # We need to resolve the checker from a local or remote distribution repo.
              pex_builder.add_resolved_requirements(
                [PythonRequirement(self._CHECKER_REQ)])

          pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
          pex_builder.freeze()

    return PEX(pex_path, interpreter=interpreter)
Example #5
def main():
    failed = []

    print('Getting environment')
    env = Environment()

    for pkgname in env:
        print('Checking %s' % pkgname)
        if pkgname in PROJECTS['bad']:
            print('\tMarked as BAD. FAIL')
            failed.append(pkgname)
            continue
        elif pkgname in PROJECTS['good']:
            print('\tMarked as GOOD. PASS')
            continue

        for pkg in env[pkgname]:
            had_conclusive_result = False
            print('\tChecking version %s' % pkg.version)
            for check in CHECKS:
                print('\t\tRunning check: %s...' % check.__name__)
                res = check(pkg)
                if res is True:
                    print('\t\tPASS')
                    had_conclusive_result = True
                    break
                elif res is False:
                    print('\t\tFAIL')
                    had_conclusive_result = True
                    failed.append(pkg)
                    break
                elif res is None:
                    print('\t\t\tINCONCLUSIVE')
                    continue
                else:
                    print('\t\t\tINVALID RESULT: %s' % res)
                    failed.append(pkg)
                    break

            if not had_conclusive_result:
                failed.append(pkg)
                print('\t\tNO CONCLUSIVE RESULTS. FAIL')

    print()
    if failed:
        print('At least one package failed.')
        for fail in failed:
            print('\tFailure: %s' % fail)
        sys.exit(1)
    else:
        print('All packages passed license check')
Example #6
def clean_version():
    """ Keep only most recent version of each package """

    env = Environment()

    for project_name in env._distmap.keys():

        installed_version = [d.version for d in env[project_name]]
        if (installed_version):
            max_version = max(installed_version, key=parse_version)

            for dist in env[project_name]:
                if parse_version(dist.version) < parse_version(max_version):
                    remove_egg(project_name, dist)
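
A minimal sketch of the ordering clean_version relies on: parse_version gives PEP 440 semantics, so "1.10" sorts above "1.9" (the version strings below are illustrative).

from pkg_resources import parse_version

versions = ["1.9", "1.10", "1.10rc1"]
print(max(versions, key=parse_version))              # -> 1.10
assert parse_version("1.10") > parse_version("1.9")
assert parse_version("1.10rc1") < parse_version("1.10")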
Example #7
def test_list_one_installed(environment: Environment):
    env = Environment(search_path=[])
    environment.return_value = env
    metadata = """Metadata-Version: 2.1
Name: chaostoolkit-some-stuff
Version: 0.1.0
Summary: Chaos Toolkit some package
Home-page: http://chaostoolkit.org
Author: chaostoolkit Team
Author-email: [email protected]
License: Apache License 2.0
"""

    env.add(
        Distribution(project_name="chaostoolkit-some-stuff",
                     version="0.1.0",
                     metadata=InMemoryMetadata({"PKG-INFO": metadata})))
    extensions = list_extensions()
    assert len(extensions) == 1

    ext = extensions[0]
    assert ext.name == "chaostoolkit-some-stuff"
    assert ext.version == "0.1.0"
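
The InMemoryMetadata helper used above is not shown in this snippet; a minimal stand-in could look like the sketch below (an assumption: the test only needs has_metadata, get_metadata and get_metadata_lines, which Distribution delegates to its metadata provider).

from pkg_resources import yield_lines

class InMemoryMetadata:
    """Serve PKG-INFO style metadata to a Distribution from a plain dict."""

    def __init__(self, metadata):
        self._metadata = metadata          # e.g. {"PKG-INFO": "Name: ...\nVersion: ..."}

    def has_metadata(self, name):
        return name in self._metadata

    def get_metadata(self, name):
        return self._metadata[name]

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))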
Example #8
 def run(self):
     self.run_command('build_tests')
     this_dir = os.path.normpath(
         os.path.abspath(os.path.dirname(__file__)))
     lib_dirs = glob.glob(os.path.join(this_dir, 'build', 'lib*'))
     test_dir = os.path.join(this_dir, 'build', 'tests')
     env = Environment(search_path=lib_dirs)
     distributions = env["nose"]
     assert len(distributions) == 1, (
         "Incorrect number of distributions found")
     dist = distributions[0]
     dist.activate()
     sys.path.insert(0, test_dir)
     setuptools.command.test.test.run(self)
Example #9
def main():
    """ Run the application. """

    # Find all additional eggs.
    environment = Environment(EGG_PATH)

    distributions, errors = working_set.find_plugins(environment)
    if len(errors) > 0:
        raise SystemError('cannot add eggs %s' % errors)

    logger.debug('added eggs %s' % distributions)

    # Add them to the working set.
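    # (Note: map() is lazy on Python 3; a plain for-loop or list(map(...))
    # is needed there for the side effect to actually run.)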
    map(working_set.add, distributions)

    # Create and run the application.
    return run()
Example #10
    def finalize_options(self):
        if self.version:
            print 'distribute %s' % get_distribution('distribute').version
            sys.exit()

        py_version = sys.version.split()[0]

        self.config_vars = {
            'dist_name': self.distribution.get_name(),
            'dist_version': self.distribution.get_version(),
            'dist_fullname': self.distribution.get_fullname(),
            'py_version': py_version,
            'py_version_short': py_version[0:3],
            'py_version_nodot': py_version[0] + py_version[2],
        }

        self._expand('install_dir')

        normpath = map(normalize_path, sys.path)

        self.index_url = self.index_url or "http://pypi.python.org/simple"

        hosts = ['*']
        if self.package_index is None:
            self.package_index = self.create_index(
                self.index_url,
                hosts=hosts,
            )
        self.local_index = Environment(sys.path)

        if self.find_links is not None:
            if isinstance(self.find_links, basestring):
                self.find_links = self.find_links.split()
        else:
            self.find_links = []

        self.package_index.add_find_links(self.find_links)

        if not self.args:
            raise DistutilsArgError(
                "No urls, filenames, or requirements specified (see --help)")

        self.outputs = []
Example #11
    def _load_eggs(self, search_path):
        # Redefine where to search entry point.
        distributions, errors = working_set.find_plugins(
            Environment(search_path)
        )
        if errors:
            logger.warn('could not load %s', errors)
        map(working_set.add, distributions)

        # Load each entry point one by one
        for entry in working_set.iter_entry_points('rdiffweb.plugins'):
            # Get unicode plugin name
            module_name = entry.name
            if isinstance(module_name, bytes):
                module_name = module_name.decode('ascii')
            # Plugin is enabled. Load it.
            logger.debug('loading module plugin [%s] from [%r]', module_name, entry.module_name)
            try:
                yield (module_name, entry.load(), entry.dist)
            except:
                logger.error('fail to load module plugin [%s] from [%r]', module_name, entry.module_name, exc_info=1)
Example #12
def list_extensions() -> List[ExtensionInfo]:
    """
    List all installed Chaos Toolkit extensions in the current environment.

    Notice, for now we can only list extensions that start with `chaostoolkit-`
    in their package name.

    This is not as powerful and solid as we want it to be. The trick is that we
    can't rely on any metadata inside extensions to tell us they exist and
    what functionality they provide either. Python has the concept of trove
    classifiers on packages but we can't extend them yet so they are of no use
    to us.

    In a future version, we will provide a mechanism for packages to support
    better detection.
    """
    infos = []
    distros = Environment()
    seen = []
    for key in distros:
        for dist in distros[key]:
            if dist.has_metadata('PKG-INFO'):
                m = dist.get_metadata('PKG-INFO')
                info = message_from_string(m)
                name = info["Name"]
                if name == "chaostoolkit-lib":
                    continue
                if name in seen:
                    continue
                seen.append(name)
                if name.startswith("chaostoolkit-"):
                    ext = ExtensionInfo(name=name,
                                        version=info["Version"],
                                        summary=info["Summary"],
                                        license=info["License"],
                                        author=info["Author"],
                                        url=info["Home-page"])
                    infos.append(ext)
    return infos
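
PKG-INFO uses RFC 822 style headers, which is why email.message_from_string works here; a minimal sketch with an illustrative payload:

from email import message_from_string

pkg_info = "Metadata-Version: 2.1\nName: chaostoolkit-example\nVersion: 0.1.0\n"
info = message_from_string(pkg_info)
print(info["Name"], info["Version"])   # -> chaostoolkit-example 0.1.0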
Example #13
def initPluginEnv(options, path):

    from pkg_resources import working_set, Environment

    # if options is passed in, use prefs to determine what to bypass
    # otherwise all plugins are added to the working_set

    if options is not None:
        prefs = loadPrefs(options)
        pluginPrefs = prefs.get('plugins', None)
    else:
        prefs = None
        pluginPrefs = None

    plugin_env = Environment(path)
    plugin_eggs = []

    # remove uninstalled plugins from prefs
    if pluginPrefs is not None:
        for project_name in pluginPrefs.keys():
            if project_name not in plugin_env:
                del prefs['plugins'][project_name]
        prefs.write()

    # add active plugins and l10n eggs to working set
    for project_name in sorted(plugin_env):
        for egg in plugin_env[project_name]:
            if egg.has_metadata('resources.ini'):
                working_set.add(egg)  # possible l10n egg
            elif (pluginPrefs is None
                  or pluginPrefs.get(project_name) != 'inactive'):
                working_set.add(egg)  # possible plugin egg
                plugin_eggs.append(egg)
            break

    return plugin_env, plugin_eggs
Example #14
 def environment(self):
     return Environment([str(self.sp)])
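
A minimal sketch of what the returned Environment offers (the search path below is an illustrative, hypothetical directory):

from pkg_resources import Environment

env = Environment(search_path=["/tmp/my-eggs"])   # hypothetical path
for project_name in env:                          # keys are lower-cased project names
    for dist in env[project_name]:                # newest distribution first
        print(project_name, dist.version, dist.location)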
Example #15
def already_satisfied(req: str) -> bool:
    requirement = Requirement(req)
    environment = Environment()
    return any(dist in requirement for dist in environment[requirement.name])
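
The membership test "dist in requirement" works because Requirement.__contains__ accepts a Distribution as well as a version string; a small illustrative check (the specifier is an assumption):

from pkg_resources import Requirement, get_distribution

req = Requirement("setuptools>=40.0")    # illustrative specifier
dist = get_distribution("setuptools")    # whichever version is installed
print(dist in req)                       # True when the installed version satisfies the specifier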
Example #16
def run(plugins=[], use_eggs=True, egg_path=[], image_path=[], template_path=[], startup_task="", application_name="Omnivore", debug_log=False, document_class=None):
    """Start the application
    
    :param plugins: list of user plugins
    :param use_eggs Boolean: search for setuptools plugins and plugins in local eggs?
    :param egg_path: list of user-specified paths to search for more plugins
    :param startup_task string: task factory identifier for task shown in initial window
    :param application_name string: change application name instead of default Omnivore
    """
    EnthoughtWxApp.mac_menubar_app_name = application_name
    _app = EnthoughtWxApp(redirect=False)
    if False:  # enable this to use FilterEvent
        _app.FilterEvent = _app.FilterEventMouseWheel

    # Enthought library imports.
    from envisage.api import PluginManager
    from envisage.core_plugin import CorePlugin

    # Local imports.
    from omnivore.framework.application import FrameworkApplication
    from omnivore.framework.plugin import OmnivoreTasksPlugin, OmnivoreMainPlugin
    from omnivore.file_type.plugin import FileTypePlugin
    from omnivore import get_image_path
    from omnivore.utils.jobs import get_global_job_manager

    # Include standard plugins
    core_plugins = [ CorePlugin(), OmnivoreTasksPlugin(), OmnivoreMainPlugin(), FileTypePlugin() ]
    if sys.platform == "darwin":
        from omnivore.framework.osx_plugin import OSXMenuBarPlugin
        core_plugins.append(OSXMenuBarPlugin())

    import omnivore.file_type.recognizers
    core_plugins.extend(omnivore.file_type.recognizers.plugins)

    import omnivore.plugins
    core_plugins.extend(omnivore.plugins.plugins)

    # Add the user's plugins
    core_plugins.extend(plugins)

    # Check basic command line args
    default_parser = argparse.ArgumentParser(description="Default Parser")
    default_parser.add_argument("--no-eggs", dest="use_eggs", action="store_false", default=True, help="Do not load plugins from python eggs")
    options, extra_args = default_parser.parse_known_args()

    # The default is to use the specified plugins as well as any found
    # through setuptools and any local eggs (if an egg_path is specified).
    # Egg/setuptool plugin searching is turned off by the use_eggs parameter.
    default = PluginManager(
        plugins = core_plugins,
    )
    if use_eggs and options.use_eggs:
        from pkg_resources import Environment, working_set
        from envisage.api import EggPluginManager
        from envisage.composite_plugin_manager import CompositePluginManager

        # Find all additional eggs and add them to the working set
        environment = Environment(egg_path)
        distributions, errors = working_set.find_plugins(environment)
        if len(errors) > 0:
            raise SystemError('cannot add eggs %s' % errors)
        logger = logging.getLogger()
        logger.debug('added eggs %s' % distributions)
        map(working_set.add, distributions)

        # The plugin manager specifies which eggs to include and ignores all others
        egg = EggPluginManager(
            include = [
                'omnivore.tasks',
            ]
        )

        plugin_manager = CompositePluginManager(
            plugin_managers=[default, egg]
        )
    else:
        plugin_manager = default

    # Add omnivore icons after all image paths to allow user icon themes to take
    # precedence
    from pyface.resource_manager import resource_manager
    import os
    image_paths = image_path[:]
    image_paths.append(get_image_path("icons"))
    image_paths.append(get_image_path("../omnivore8bit/icons"))
    resource_manager.extra_paths.extend(image_paths)

    from omnivore.templates import template_subdirs
    template_subdirs.extend(template_path)

    kwargs = {}
    if startup_task:
        kwargs['startup_task'] = startup_task
    if application_name:
        kwargs['name'] = application_name
    if document_class:
        kwargs['document_class'] = document_class

    # Create a debugging log
    if debug_log:
        filename = app.get_log_file_name("debug")
        handler = logging.FileHandler(filename)
        logger = logging.getLogger('')
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)

    # Turn off omnivore log debug messages by default
    log = logging.getLogger("omnivore")
    log.setLevel(logging.INFO)

    # check for logging stuff again to pick up any new loggers loaded since
    # startup
    import omnivore.utils.wx.error_logger as error_logger
    if "-d" in extra_args:
        i = extra_args.index("-d")
        error_logger.enable_loggers(extra_args[i+1])
    app = FrameworkApplication(plugin_manager=plugin_manager, command_line_args=extra_args, **kwargs)

    app.run()

    job_manager = get_global_job_manager()
    if job_manager is not None:
        job_manager.shutdown()
Example #17
    def configure(self, settings):
        """
        Finds all discoverable plugins and configures them.  Plugins are
        discoverable if they are in the normal python module path, or in
        the path specified by 'plugin directory'.
        """
        # load plugins
        section = settings.section('server')
        self.pluginsdir = os.path.join(section.getPath("plugin directory"))
        if self.pluginsdir:
            logger.debug("loading plugins from %s" % self.pluginsdir)
            working_set.add_entry(self.pluginsdir)
            env = Environment([
                self.pluginsdir,
            ])
        else:
            env = Environment([])
        self._eggs, errors = working_set.find_plugins(env)
        # load plugin eggs
        for p in self._eggs:
            working_set.add(p)
            logger.info("loaded plugin egg '%s'" % p)
        for e in errors:
            logger.info("failed to load plugin egg '%s'" % e)
        # load all discovered plugins for each type
        for ep in working_set.iter_entry_points("terane.plugin"):
            # if no config section exists, then don't load the plugin
            if not settings.hasSection("plugin:%s" % ep.name):
                continue
            try:
                # load and configure the plugin
                _Plugin = ep.load()
                if not IPlugin.implementedBy(_Plugin):
                    raise Exception("plugin '%s' doesn't implement IPlugin" %
                                    ep.name)
                plugin = _Plugin()
                plugin.setName(ep.name)
                plugin.setServiceParent(self)
                section = settings.section("plugin:%s" % ep.name)
                plugin.configure(section)
                logger.info("loaded plugin '%s'" % ep.name)
                # find all plugin components
                for impl, spec, name in plugin.listComponents():
                    if not ILoadable.implementedBy(impl):
                        raise Exception(
                            "component %s:%s in plugin %s doesn't implement ILoadable"
                            % (spec.__name__, name, ep.name))
                    if not isinstance(spec, InterfaceClass):
                        raise TypeError("spec must be an Interface")
                    if (spec, name) in self._components:
                        raise KeyError("component %s:%s already exists" %
                                       (spec.__name__, name))
                    # a little extra syntax here to make sure the lambda expression
                    # passed as the factory function has the appropriate variables bound
                    # in its scope
                    def _makeTrampoline(impl=impl, plugin=plugin):
                        def _trampoline(*args, **kwds):
                            logger.trace("allocating new %s from plugin %s" %
                                         (impl.__name__, plugin.name))
                            return impl(plugin, *args, **kwds)

                        return _trampoline

                    self._components[(spec,
                                      name)] = _makeTrampoline(impl, plugin)
                    logger.trace("added component %s:%s" %
                                 (spec.__name__, name))
            except ConfigureError:
                raise
            except Exception, e:
                logger.exception(e)
                logger.warning("failed to load plugin '%s'" % ep.name)
Example #18
class DistributionsManager(object):
	def __init__(self):
		self.mod_folder = os.path.join(os.getcwd(), "raduga_modules")
		if not os.path.exists(self.mod_folder):
			os.mkdir(self.mod_folder)
		self._init_environment()

	def _init_environment(self):
		dist_folders = map(lambda d: os.path.join(os.getcwd(), self.mod_folder, d), os.listdir(self.mod_folder))
		dist_folders = filter(lambda f: os.path.exists(os.path.join(f, "EGG-INFO")), dist_folders)
		dists = map(lambda f: Distribution.from_filename(f) , dist_folders)
		#
		self.pkg_env = Environment()
		for dist in dists: self.pkg_env.add(dist)

	def _add_to_environment(self, egg_folder):
		dist = Distribution.from_filename(egg_folder)
		self.pkg_env.add(dist)

	def _match_req(self, req):
		return self.pkg_env.best_match(req, working_set)

	def _flatten_reqs(self, *req_sets):
		# req_sets further in the list take precedence
		reqs = {}
		for rset in req_sets:
			for sreq in rset:
				req = Requirement.parse(sreq)
				reqs[req.key] = req
		return reqs.values()

	@contextmanager
	def requirement_loader(self, *req_sets):
		# Save sys.path and sys.modules, to be restored later
		import sys, copy
		old_path = copy.copy(sys.path)
		old_sys_modules = sys.modules.keys()
		# Find distributions for all the requirements
		req_dists = []
		reqs = self._flatten_reqs(req_sets)
		for req in reqs:
			match = self._match_req(req)
			if match is None:
				raise RuntimeError("Unable to find distribution matching %s" % str(req))
			req_dists.append(match)
		# Activate the distributions, return control
		for req in req_dists: req.activate()
		yield
		# Restore sys path and modules
		sys.path = old_path
		for modname in sys.modules.keys():
			if not modname in old_sys_modules:
				del sys.modules[modname]

	def install_dist(self, path):
		setup_py = os.path.join(os.getcwd(), path, "setup.py")
		if not os.path.isfile(setup_py):
			raise RuntimeError("Folder %s doesn't have a setup file" % path)
		with self._build_egg_env(path) as tempdir:
			import subprocess, zipfile
			subprocess.check_call(["python", setup_py, "bdist_egg", "--dist-dir=%s" % tempdir])
			egg = os.listdir(tempdir)[0]    # egg will be the single entry in the temp folder
			# TODO: check if exactly that same egg is installed
			eggf = os.path.join(self.mod_folder, egg)   # target egg folder
			os.mkdir(eggf)
			eggz = zipfile.ZipFile(os.path.join(tempdir, egg))
			eggz.extractall(eggf)

	@contextmanager
	def _build_egg_env(self, path):
		import tempfile, shutil
		old_cwd = os.getcwd()
		os.chdir(path)
		tempdir = tempfile.mkdtemp()
		yield tempdir
		shutil.rmtree(tempdir)
		os.chdir(old_cwd)
Example #19
    "overlay": "p2ner.%s.overlay",
    "serveroverlay": "p2ner.%s.serveroverlay",
    "pipeelement": "p2ner.%s.pipeelement",
    "ui": "p2ner.%s.ui",
    'interface': 'p2ner.%s.interface',
    "input": "p2ner.%s.input",
    "output": "p2ner.%s.output",
    "plugin": "p2ner.%s.plugin",
    "stats": "p2ner.%s.stats",
    "flowcontrol": "p2ner.%s.flowcontrol"
}

COMPONENTS_DIR = os.path.dirname(os.path.abspath(sys.argv[0]))
USER_COMPONENTS_DIR = ""
working_set.add_entry(COMPONENTS_DIR)
pkg_env = Environment([COMPONENTS_DIR])
for name in pkg_env:
    working_set.require(pkg_env[name][0].key)

sys.path.append(COMPONENTS_DIR)


def getComponents(ctype):
    """Get all the components of a given type

    :param ctype: the component's type
    :type ctype: string
    :returns: {"ComponentName":ComponentClass,...}
    :rtype: dict

    """
Example #20
 def __init__(self, context, request):
     super().__init__(context, request)
     environment = self.environment = Environment()
     environment.scan()
Example #21
    FileMetadata,
    PathMetadata,
    ResourceManager,
    Requirement,
    AvailableDistributions,
    DistInfoDistribution,
    Distribution,
)
from xmlrpc.client import ServerProxy
import requests
import tablib.core
from Other import OrderedSet
from operator import itemgetter
from tablib import Dataset

Distributions = Environment(search_path=sys.path)
Distributions.scan(search_path=sys.path)
# print(len(Distributions._distmap))
import pkg_resources

Cache = OrderedSet()
AD = AvailableDistributions()
print(len(AD._distmap))


refiners = "AND,OR,ANDNOT,ANDMAYBE,NOT".split(",")


def boost(term, value):
    return "{}^{}".format(term, value)
class Bootstrapper(object):
    def __init__(self,
                 buildout_version,
                 eggs_dir='bootstrap-eggs',
                 bootstrap_config=None,
                 output_bootstrap_config=None,
                 offline=False,
                 python=sys.executable,
                 buildout_dir=None,
                 buildout_config='buildout.cfg',
                 force_setuptools_path=None,
                 force_distribute=None,
                 force_setuptools=None,
                 error=None):
        """Initializations.

        Right after instantiation, the requirements for ``setuptools`` and
        ``zc.buildout`` are fully known.

        :param error: callable to issue end-user error messages and quit.
        """
        self.init_directories(buildout_dir, eggs_dir)
        self.error = error

        self.init_python_info(python)
        self.bootstrap_config = bootstrap_config
        self.output_bootstrap_config = output_bootstrap_config
        if buildout_version is None:
            buildout_version = self.read_bootstrap_config()
        self.buildout_version = buildout_version
        self.init_reqs(buildout_version,
                       force_setuptools_path=force_setuptools_path,
                       force_distribute=force_distribute,
                       force_setuptools=force_setuptools)
        self.init_internal_attrs()
        self.buildout_config = buildout_config
        self.offline = offline

    def init_directories(self, buildout_dir, eggs_dir):
        if buildout_dir is None:
            buildout_dir = os.getcwd()
        self.buildout_dir = buildout_dir
        self.eggs_dir = os.path.abspath(os.path.join(buildout_dir, eggs_dir))
        if not os.path.exists(self.eggs_dir):
            os.makedirs(self.eggs_dir)

    def bootstrap(self):
        # actually calling the property right now
        logger.info(
            "Starting bootstrap stage 1 for %s (%s) "
            "and " + str(self.buildout_req), self.python, self.python_version)
        if self.setuptools_path is None:
            self.setuptools_path = self.ensure_req(self.setuptools_req)

        paths = dict(setuptools_path=self.setuptools_path,
                     buildout_path=self.ensure_req(self.buildout_req))
        oldpwd = os.getcwd()
        os.chdir(self.buildout_dir)
        try:
            boot_fname = 'bootstrap_stage2.py'
            with open(boot_fname, 'w') as bootf:
                bootf.write(bootstrap_script_tmpl % paths)

            logger.info("Starting bootstrap stage 2 (running %r)", boot_fname)
            cmd = [self.python, boot_fname, '-c', self.buildout_config]
            cmd.append('buildout:eggs-directory=' + self.eggs_dir)
            if self.offline:
                cmd.append("-o")
            logger.debug("Exact stage2 command is %r", cmd)
            subprocess.check_call(cmd)
            self.clean()
            self.dump_bootstrap_config()
            self.remove_develop_eggs()
        finally:
            os.chdir(oldpwd)  # crucial for tests

    def read_bootstrap_config(self):
        """Read buildout version from a bootstrap INI file."""
        config = self.bootstrap_config
        if config is None:
            return

        ini_path = os.path.join(self.buildout_dir, config)
        if not os.path.exists(ini_path):
            logger.info(
                "No bootstrap configuration file found at %r "
                "proceeding without any prescribed buildout version.",
                ini_path)
            return

        logger.info("Reading buildout version from %r", ini_path)
        parser = ConfigParser()
        try:
            parser.read(ini_path)
            return parser.get('bootstrap', 'buildout-version').strip()
        except Exception as exc:
            logger.warn(
                "Exception while reading bootstrap configuration "
                "file %r, %s. Ignoring the file", ini_path, exc)

    def dump_bootstrap_config(self):
        """Write the used buildout version in the bootstrap config file.

        If a precise version has been required, at this stage we know
        it works, just dump it.
        Otherwise, take the version after stage2, there's no reason it should
        not work.

        :param out_path: path to the produced file, relative to the buildout
                         directory
        """
        out_path = self.output_bootstrap_config
        if out_path is None:
            return
        b_version = self.buildout_version

        # we have already changed directory to buildout directory
        # TODO make it independent of cwd
        try:
            if b_version is None:
                # not passed on command line, not read from config file
                # TODO don't hardcode exe path (not so easy)
                cmd = ["bin/buildout", "--version"]
                cmd_str = ' '.join(cmd)
                logger.info(
                    "No buildout version has been specified on "
                    "command line nor in bootstrap config file. "
                    "Using the one stage 2 produced (%s)", cmd_str)
                buildout = subprocess.Popen(cmd, stdout=subprocess.PIPE)
                out = buildout.communicate()[0]
                if buildout.returncode == 0:
                    b_version = out.decode().split()[-1]
                else:
                    raise RuntimeError(cmd_str + " has errors")

            with open(out_path, 'w') as ini:
                ini.write('\n'.join((
                    "# Produced by unibootstrap",
                    "# at time (UTC): " + datetime.utcnow().isoformat(),
                    "# with Python " + sys.version.splitlines()[0],
                    "",
                    "[bootstrap]",
                    "buildout-version = " + b_version,
                )))
        except Exception as exc:
            logger.error("Could not write used "
                         "buildout version in %r (%s)", out_path, exc)
        logger.info("Wrote buildout version %s to %s", b_version, out_path)

    def init_env(self):
        self.ws = WorkingSet(entries=())
        self.env = Environment(search_path=[self.eggs_dir])

    def init_reqs(self,
                  buildout_version,
                  force_setuptools_path=None,
                  force_setuptools=None,
                  force_distribute=None):
        """Sets wished requirement attributes.

        These attributes are instances of ``Requirement`` (all needed
        workarounds for that have already been done), plus this method
        also creates the attributes that will be used in
        :meth:`grab_req`

        :param buildout_version: same as in :func:`guess_versions`
        :param force_setuptools_path: if set, this setuptools distribution
                                      will be used both to grab zc.buildout
                                      and in stage2 of bootstrap. This has
                                      precedence over the other 'force'
                                      arguments below.
        :param force_setuptools: force a setuptools (not distribute) req, and
                                 make the necessary preparations for it.
        :param force_distribute: force a distribute (not setuptools) req, and
                                 make the necessary preparations for it.
        """
        self.init_env()
        buildout_rhs, setuptools, setuptools_rhs = guess_versions(
            buildout_version)
        self.buildout_req = Requirement.parse('zc.buildout' + buildout_rhs)
        # this is almost the CLI, but that lets us control which one is
        # executed from PYTHONPATH (finding the equivalent executable would be
        # more hazardous)
        self._ez_install = (
            sys.executable,
            '-c',
            "from setuptools.command.easy_install import main; main()",
        )

        if force_setuptools_path is not None:
            logger.info(
                "Using the forced setuptools distribution at %r "
                "both for preparations and in stage2 bootstrap",
                force_setuptools_path)
            self.setuptools_path = force_setuptools_path
            self._ez_install_pypath = force_setuptools_path
            return

        self.setuptools_path = None
        if force_setuptools is not None and force_distribute is not None:
            # should be excluded upstream, last-time check
            logger.critical("Got force_setuptools=%r AND force_distribute=%r",
                            force_setuptools, force_distribute)
            raise ValueError("Can't force both on setuptools and distribute !")
        if force_setuptools is not None:
            setuptools, setuptools_rhs = 'setuptools', force_setuptools
        if force_distribute is not None:
            setuptools, setuptools_rhs = 'distribute', force_distribute

        # actually, installing distribute with any version of setuptools or
        # itself happens to work... provided that the magic buildout marker
        # that tells its setup not to touch the global site-packages is there;
        # otherwise, it'd try to rename an .egg_info, and would fail for non-
        # privileged users, even for a local egg production.
        # Usually, the shell would set it, thankfully it's not libc's concern
        if setuptools == 'distribute':
            os.environ['_'] = 'buildout_unibootstrap'

        if DISTRIBUTE and setuptools == 'setuptools':
            self.setuptools_req = self._setuptools_req(setuptools_rhs)
            self.init_ez_install_distribute_to_setuptools()
            return

        self.setuptools_req = Requirement.parse(setuptools + setuptools_rhs)
        self._ez_install_pypath = None

    def ensure_req(self, req):
        """Make sure that requirement is satisfied and return location.

        Either we have it already, or we install it.
        """
        # we don't use obtain() because it's not clear how it would
        # not use the working set with the full sys.path instead of our empty
        # one (fearing also slight behaviour changes across versions)
        dist = self.env.best_match(req, self.ws)
        if dist is None:
            dist = self.grab_req(req)
            self.init_env()
            dist = self.env.best_match(req, self.ws)
            if dist is None:
                raise LookupError(req)
        logger.info("Requirement %s fulfilled at %s", req, dist.location)
        return dist.location

    def grab_req(self, req):
        """Install a requirement to self.eggs_dir.

        We don't use the internal API of setuptools but spawn a subprocess
        instead, because:
        - we might use a different version of setuptools than the one we import
          from here
        - the command-line (or surface) API is obviously very stable
        """
        if self.offline:
            # actually, we would have a small ability to find setuptools
            # eggs that are present locally, but that's too complicated for
            # now and has few chances of success
            self.error("%s is not available in specified dists "
                       "directory %s, and can't be downloaded "
                       "(offline mode is requested)" % (req, self.eggs_dir))

        logger.info("%s not available locally, attempting to download", req)
        os_env = dict(os.environ)
        pypath = self._ez_install_pypath
        if pypath is not None:
            os_env['PYTHONPATH'] = pypath
        subprocess.check_call(self._ez_install +
                              ('-qamxd', self.eggs_dir, str(req)),
                              env=os_env)

    def _setuptools_req(self, req_rhs):
        """Counter distribute's hack that replaces setuptools requirements.

        if distribute is not around, this is simply a normal requirement
        parsing.
        """
        if not DISTRIBUTE:
            return Requirement.parse('setuptools' + req_rhs)

        req = Requirement.parse('willbesetuptools' + req_rhs)
        req.key = req.project_name = 'setuptools'
        return req

    def init_ez_install_distribute_to_setuptools(self):
        """Collection of workarounds to grab setuptools with distribute.

        Here it gets dirty.

        Worse than just being unable to grab setuptools, distribute
        is very confused also for other distributions that require setuptools.
        Therefore, we look for any available setuptools package (typically,
        an egg), and run easy_install from it.

        This sets the 'ez_install_pypath'.
        """

        # first, let's see if we have any version of setuptools around
        dist = self.env.best_match(self._setuptools_req(''), self.ws)
        if dist is not None and dist.project_name == 'setuptools':
            self._ez_install_pypath = dist.location
            return

        setuptools_egg_rx = re.compile(  # glob not precise enough
            r'setuptools-\d+[.]\d+.*?-py' + self.python_version + '.egg')

        # if virtualenv is around, it often comes with a bundled egg
        try:
            import virtualenv
        except ImportError:
            pass
        else:
            for venv_support in (
                    # Debian 6 & 7 python-virtualenv package:
                    '/usr/share/python-virtualenv',
                    # generic install of virtualenv:
                    os.path.join(os.path.dirname(virtualenv.__file__),
                                 'virtualenv_support')):
                # TODO look for wheels, too, more recent virtualenv versions
                # have them
                if os.path.isdir(venv_support):
                    for fname in os.listdir(venv_support):
                        if setuptools_egg_rx.match(fname):
                            self._ez_install_pypath = os.path.join(
                                venv_support, fname)
                            return

        # last chance: old-school hardcoded downloading.
        self._tmpdir = tempfile.mkdtemp()
        # Up to now in my tests, this minimal version of setuptools is
        # able to download all known setuptools versions and is the one
        # used in many versions of virtualenv (prior to wheels)
        # kept for reference for now, must retest with distribute
        #    min_pkg = 'setuptools-0.6c11-py%s.egg' % self.python_version
        min_pkg = 'setuptools-5.5.1-py2.py3-none-any.whl'
        dl = urlopen('https://pypi.python.org/packages/3.4/s/setuptools/' +
                     min_pkg).read()
        pypath = self._ez_install_pypath = os.path.join(self._tmpdir, min_pkg)
        with open(pypath, 'wb') as pkg:
            pkg.write(dl)

    def clean(self):
        tmpdir = self._tmpdir
        if tmpdir is not None and os.path.isdir(tmpdir):
            shutil.rmtree(tmpdir)

    def remove_develop_eggs(self):
        """This saves a lot of potential trouble."""
        # TODO read it from buildout and remove if below buildout_dir
        develop_eggs = os.path.join(self.buildout_dir, 'develop_eggs')
        if os.path.isdir(develop_eggs):
            shutil.rmtree(develop_eggs)

    def init_python_info(self, python):
        """Used the passed executable path to get version and absolute path.

        The x.y version string of the target Python."""
        cmd = [
            python, '-c',
            'import sys; print(sys.version); print(sys.executable)'
        ]

        try:  # no check_output in Python 2.6
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        except OSError:
            self.error("No wished Python %r executable on PATH" % python)
        out = p.communicate()[0]
        if p.returncode != 0:
            self.error("The target Python executable has errors, "
                       "command was " + ' '.join(cmd))

        lines = out.decode().splitlines()
        self.python_version = '.'.join(lines[0].split()[0].split('.', 2)[:2])
        self.python = lines[-1]

    def init_internal_attrs(self):
        self._tmpdir = None  # for cleanups
        self._pyversion = None  # for property
Example #23
 def __init__(self, groups=_plugin_groups, search_path=None):
     super(PkgResourcesFactory, self).__init__()
     self._have_new_types = True
     self._groups = copy.copy(groups)
     self._search_path = search_path
     self.env = Environment(search_path)
Example #24
class FigmentatorFactory:
    """
    This class acts as a factory and a registry for Figmentators. You simply register a
    Figmentator to the factory, then ask for the Figmentator by SuggestionType. It only
    allows a single unique Figmentator per SuggestionType. It is also responsible for
    calling the startup/shutdown of each Figmentator.
    """
    def __init__(self):
        """ Create the factory """
        self.lock = Lock()
        self.env = Environment()
        self.loop = get_event_loop()
        self.figmentators_by_type = {}
        self.settings = FigmentatorFactorySettings()

    @property
    def figmentators(self):
        """ Return a list of all the figmentators """
        return list(self.figmentators_by_type.values())

    async def get(self, suggestion_type: SuggestionType) -> Figmentator:
        """
        Get a Figmentator by its type. If there are valid settings for a
        Figmentator, it will instantiate it (and install any needed requirements).
        """
        async with self.lock:
            if suggestion_type not in self.figmentators_by_type:
                if (suggestion_type not in self.settings.figmentators  # pylint:disable=unsupported-membership-test
                    ):
                    raise ValueError(
                        f"Cannot create Figmentator of type {suggestion_type}")

                settings = self.settings.figmentators[  # pylint:disable=unsubscriptable-object
                    suggestion_type]

                requirements = await self.loop.run_in_executor(
                    None,  # use default executor
                    working_set.resolve,  # resolve model requirements
                    settings.requires,  # list of requirements
                    self.env,  # environment
                    self.installer,  # installer for missing requirements
                    True,  # whether to replace conflicting requirements
                )

                for requirement in requirements:
                    working_set.add(requirement, replace=True)

                try:
                    model_cls = settings.cls.resolve()
                except ImportError as e:
                    raise e

                if not issubclass(model_cls, Figmentator):
                    raise ValueError(
                        "model_cls must be a subclass of Figmentator")

                figmentator = model_cls(suggestion_type)
                await self.loop.run_in_executor(None, figmentator.startup,
                                                settings.properties)
                self.figmentators_by_type[suggestion_type] = figmentator

            return self.figmentators_by_type[suggestion_type]

    async def remove(self, figmentator: Figmentator):
        """
        Shut down the given Figmentator and remove it from the registry.
        """
        async with self.lock:
            suggestion_type = figmentator.suggestion_type
            assert suggestion_type in self.figmentators_by_type

            figmentator.shutdown()
            del self.figmentators_by_type[suggestion_type]

    def get_distribution(self,
                         requirement: Requirement) -> Optional[Distribution]:
        """
        Get a distribution that satisfies the requirement. Use
        find_distributions so it looks on disk, thus can find newly installed
        packages. """
        for distribution in find_distributions(self.settings.install_dir):
            if distribution in requirement:
                return distribution

        return None

    def installer(self, requirement: Requirement) -> Optional[Distribution]:
        """ A method for using easy_install to install a requirement """
        for dist in self.env[requirement.key]:
            if dist not in requirement:
                self.env.remove(dist)

        # Use easy_install despite being deprecated as it is the only way to
        # have multi-version package support. See:
        # https://packaging.python.org/guides/multi-version-installs/
        # https://packaging.python.org/discussions/pip-vs-easy-install/
        with shadow_argv([
                "",
                "easy_install",
                "--install-dir",
                self.settings.install_dir,
                str(requirement),
        ]):
            setup()

        distribution = self.get_distribution(requirement)
        if distribution:
            working_set.add(distribution, replace=True)

        return distribution
Example #25
 def pkg_environment(path):
     return Environment(path)
 def init_env(self):
     self.ws = WorkingSet(entries=())
     self.env = Environment(search_path=[self.eggs_dir])
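
Examples #18, #21 and #27 all pair an Environment with a WorkingSet through best_match; a minimal sketch of that pattern (the directory and requirement below are illustrative):

from pkg_resources import Environment, Requirement, WorkingSet

ws = WorkingSet(entries=())                        # empty working set, ignores sys.path
env = Environment(search_path=["bootstrap-eggs"])  # hypothetical eggs directory
req = Requirement.parse("zc.buildout>=2.0")        # illustrative requirement
dist = env.best_match(req, ws)                     # newest matching distribution, or None
if dist is not None:
    print(dist.project_name, dist.version, dist.location)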
Example #27
class Bootstrapper(object):

    def __init__(self, buildout_version, eggs_dir='bootstrap-eggs',
                 bootstrap_config=None,
                 output_bootstrap_config=None,
                 offline=False,
                 python=sys.executable,
                 buildout_dir=None,
                 buildout_config='buildout.cfg',
                 force_setuptools_path=None,
                 force_distribute=None,
                 force_setuptools=None,
                 error=None):
        """Initializations.

        Right after instantiation, the requirements for ``setuptools`` and
        ``zc.buildout`` are fully known.

        :param error: callable to issue end-user error messages and quit.
        """
        self.init_directories(buildout_dir, eggs_dir)
        self.error = error

        self.init_python_info(python)
        self.bootstrap_config = bootstrap_config
        self.output_bootstrap_config = output_bootstrap_config
        if buildout_version is None:
            buildout_version = self.read_bootstrap_config()
        self.buildout_version = buildout_version
        self.init_reqs(buildout_version,
                       force_setuptools_path=force_setuptools_path,
                       force_distribute=force_distribute,
                       force_setuptools=force_setuptools)
        self.init_internal_attrs()
        self.buildout_config = buildout_config
        self.offline = offline

    def init_directories(self, buildout_dir, eggs_dir):
        if buildout_dir is None:
            buildout_dir = os.getcwd()
        self.buildout_dir = buildout_dir
        self.eggs_dir = os.path.abspath(os.path.join(buildout_dir, eggs_dir))
        if not os.path.exists(self.eggs_dir):
            os.makedirs(self.eggs_dir)

    def bootstrap(self):
        # actually calling the property right now
        logger.info("Starting bootstrap stage 1 for %s (%s) "
                    "and " + str(self.buildout_req),
                    self.python, self.python_version)
        if self.setuptools_path is None:
            self.setuptools_path = self.ensure_req(self.setuptools_req)

        paths = dict(setuptools_path=self.setuptools_path,
                     buildout_path=self.ensure_req(self.buildout_req))
        oldpwd = os.getcwd()
        os.chdir(self.buildout_dir)
        try:
            boot_fname = 'bootstrap_stage2.py'
            with open(boot_fname, 'w') as bootf:
                bootf.write(bootstrap_script_tmpl % paths)

            logger.info("Starting bootstrap stage 2 (running %r)",
                        boot_fname)
            cmd = [self.python, boot_fname, '-c', self.buildout_config]
            cmd.append('buildout:eggs-directory=' + self.eggs_dir)
            if self.offline:
                cmd.append("-o")
            logger.debug("Exact stage2 command is %r", cmd)
            subprocess.check_call(cmd)
            self.clean()
            self.dump_bootstrap_config()
            self.remove_develop_eggs()
        finally:
            os.chdir(oldpwd)  # crucial for tests

    def read_bootstrap_config(self):
        """Read buildout version from a bootstrap INI file."""
        config = self.bootstrap_config
        if config is None:
            return

        ini_path = os.path.join(self.buildout_dir, config)
        if not os.path.exists(ini_path):
            logger.info("No bootstrap configuration file found at %r "
                        "proceeding without any prescribed buildout version.",
                        ini_path)
            return

        logger.info("Reading buildout version from %r", ini_path)
        parser = ConfigParser()
        try:
            parser.read(ini_path)
            return parser.get('bootstrap', 'buildout-version').strip()
        except Exception as exc:
            logger.warn("Exception while reading bootstrap configuration "
                        "file %r, %s. Ignoring the file",
                        ini_path, exc)

    def dump_bootstrap_config(self):
        """Write the used buildout version in the bootstrap config file.

        If a precise version has been required, at this stage we know
        it works, just dump it.
        Otherwise, take the version after stage2, there's no reason it should
        not work.

        :param out_path: path to the produced file, relative to the buildout
                         directory
        """
        out_path = self.output_bootstrap_config
        if out_path is None:
            return
        b_version = self.buildout_version

        # we have already changed directory to buildout directory
        # TODO make it independent of cwd
        try:
            if b_version is None:
                # not passed on command line, not read from config file
                # TODO don't hardcode exe path (not so easy)
                cmd = ["bin/buildout", "--version"]
                cmd_str = ' '.join(cmd)
                logger.info("No buildout version has been specified on "
                            "command line nor in bootstrap config file. "
                            "Using the one stage 2 produced (%s)",
                            cmd_str)
                buildout = subprocess.Popen(cmd, stdout=subprocess.PIPE)
                out = buildout.communicate()[0]
                if buildout.returncode == 0:
                    b_version = out.decode().split()[-1]
                else:
                    raise RuntimeError(cmd_str + " has errors")

            with open(out_path, 'w') as ini:
                ini.write('\n'.join((
                    "# Produced by unibootstrap",
                    "# at time (UTC): " + datetime.utcnow().isoformat(),
                    "# with Python " + sys.version.splitlines()[0],
                    "",
                    "[bootstrap]",
                    "buildout-version = " + b_version,
                )))
        except Exception as exc:
            logger.error("Could not write used "
                         "buildout version in %r (%s)", out_path, exc)
        logger.info("Wrote buildout version %s to %s", b_version, out_path)

    def init_env(self):
        self.ws = WorkingSet(entries=())
        self.env = Environment(search_path=[self.eggs_dir])

    def init_reqs(self, buildout_version,
                  force_setuptools_path=None,
                  force_setuptools=None, force_distribute=None):
        """Sets wished requirement attributes.

        These attributes are instances of ``Requirement`` (all needed
        workarounds for that have already been done), plus this method
        also creates the attributes that will be used in
        :meth:`grab_req`

        :param buildout_version: same as in :func:`guess_versions`
        :param force_setuptools_path: if set, this setuptools distribution
                                      will be used both to grab zc.buildout
                                      and in stage2 of bootstrap. This has
                                      precedence over the other 'force'
                                      arguments below.
        :param force_setuptools: force a setuptools (not distribute) req, and
                                 make the necessary preparations for it.
        :param force_distribute: force a distribute (not setuptools) req, and
                                 make the necessary preparations for it.
        """
        self.init_env()
        buildout_rhs, setuptools, setuptools_rhs = guess_versions(
            buildout_version)
        self.buildout_req = Requirement.parse('zc.buildout' + buildout_rhs)
        # this is almost the CLI, but that lets us control which one is
        # executed from PYTHONPATH (finding the equivalent executable would be
        # more hazardous)
        self._ez_install = (
            sys.executable, '-c',
            "from setuptools.command.easy_install import main; main()",
        )

        if force_setuptools_path is not None:
            logger.info("Using the forced setuptools distribution at %r "
                        "both for preparations and in stage2 bootstrap",
                        force_setuptools_path)
            self.setuptools_path = force_setuptools_path
            self._ez_install_pypath = force_setuptools_path
            return

        self.setuptools_path = None
        if force_setuptools is not None and force_distribute is not None:
            # should be excluded upstream, last-time check
            logger.critical("Got force_setuptools=%r AND force_distribute=%r",
                            force_setuptools, force_distribute)
            raise ValueError("Can't force both on setuptools and distribute !")
        if force_setuptools is not None:
            setuptools, setuptools_rhs = 'setuptools', force_setuptools
        if force_distribute is not None:
            setuptools, setuptools_rhs = 'distribute', force_distribute

        # actually, installing distribute with any version of setuptools or
        # itself happens to work... provided that the magic buildout marker
        # that tells its setup not to touch the global site-packages is there;
        # otherwise, it'd try to rename an .egg_info, and would fail for non-
        # privileged users, even for a local egg production.
        # Usually, the shell would set it, thankfully it's not libc's concern
        if setuptools == 'distribute':
            os.environ['_'] = 'buildout_unibootstrap'

        if DISTRIBUTE and setuptools == 'setuptools':
            self.setuptools_req = self._setuptools_req(setuptools_rhs)
            self.init_ez_install_distribute_to_setuptools()
            return

        self.setuptools_req = Requirement.parse(setuptools + setuptools_rhs)
        self._ez_install_pypath = None

    def ensure_req(self, req):
        """Make sure that requirement is satisfied and return location.

        Either we have it already, or we install it.
        """
        # we don't use obtain() because it's not clear how to keep it from
        # using a working set built on the full sys.path instead of our empty
        # one (we also fear slight behaviour changes across versions)
        dist = self.env.best_match(req, self.ws)
        if dist is None:
            dist = self.grab_req(req)
            self.init_env()
            dist = self.env.best_match(req, self.ws)
            if dist is None:
                raise LookupError(req)
        logger.info("Requirement %s fulfilled at %s", req, dist.location)
        return dist.location

    def grab_req(self, req):
        """Install a requirement to self.eggs_dir.

        We don't use setuptools' internal API but spawn a subprocess instead,
        because:
        - we might use a different version of setuptools than the one we import
          from here
        - the command-line (or surface) API is obviously very stable
        """
        if self.offline:
            # actually, we might have a small chance of finding setuptools
            # eggs that are present locally, but that's too complicated for
            # now and unlikely to succeed
            self.error("%s is not available in specified dists "
                       "directory %s, and can't be downloaded "
                       "(offline mode is requested)" % (req, self.eggs_dir))

        logger.info("%s not available locally, attempting to download", req)
        os_env = dict(os.environ)
        pypath = self._ez_install_pypath
        if pypath is not None:
            os_env['PYTHONPATH'] = pypath
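        # easy_install options used below: -q (quiet), -a (always copy),
        # -m (multi-version, no site-packages activation), -x (exclude
        # scripts), -d (install directory)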
        subprocess.check_call(self._ez_install +
                              ('-qamxd', self.eggs_dir, str(req)),
                              env=os_env)

    def _setuptools_req(self, req_rhs):
        """Counter distribute's hack that replaces setuptools requirements.

        If distribute is not around, this is simply normal requirement
        parsing.
        """
        if not DISTRIBUTE:
            return Requirement.parse('setuptools' + req_rhs)

        req = Requirement.parse('willbesetuptools' + req_rhs)
        req.key = req.project_name = 'setuptools'
        return req
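        # For illustration: under distribute, a plain
        # Requirement.parse('setuptools>=0.6') would fall victim to the hack
        # mentioned above, so we parse a dummy name and patch it afterwards:
        #   req = Requirement.parse('willbesetuptools>=0.6')
        #   req.key = req.project_name = 'setuptools'
        # leaving a requirement that matches genuine setuptools distributions.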

    def init_ez_install_distribute_to_setuptools(self):
        """Collection of workarounds to grab setuptools with distribute.

        Here it gets dirty.

        Worse than just being unable to grab setuptools, distribute also gets
        confused by other distributions that require setuptools.
        Therefore, we look for any available setuptools package (typically an
        egg) and run easy_install from it.

        This sets ``_ez_install_pypath``.
        """

        # first, let's see if we have any version of setuptools around
        dist = self.env.best_match(self._setuptools_req(''), self.ws)
        if dist is not None and dist.project_name == 'setuptools':
            self._ez_install_pypath = dist.location
            return

        setuptools_egg_rx = re.compile(  # glob not precise enough
            r'setuptools-\d+[.]\d+.*?-py' + self.python_version + '.egg')

        # if virtualenv is around, it often comes with a bundled egg
        try:
            import virtualenv
        except ImportError:
            pass
        else:
            for venv_support in (
                    # Debian 6 & 7 python-virtualenv package:
                    '/usr/share/python-virtualenv',
                    # generic install of virtualenv:
                    os.path.join(os.path.dirname(virtualenv.__file__),
                                 'virtualenv_support')):
                # TODO look for wheels, too, more recent virtualenv versions
                # have them
                if os.path.isdir(venv_support):
                    for fname in os.listdir(venv_support):
                        if setuptools_egg_rx.match(fname):
                            self._ez_install_pypath = os.path.join(
                                venv_support, fname)
                            return

        # last chance: old-school hardcoded downloading.
        self._tmpdir = tempfile.mkdtemp()
        # Up to now in my tests, this minimal version of setuptools has been
        # able to download all known setuptools versions and is the one
        # used in many versions of virtualenv (prior to wheels).
        # Kept for reference for now; must retest with distribute:
        #    min_pkg = 'setuptools-0.6c11-py%s.egg' % self.python_version
        min_pkg = 'setuptools-5.5.1-py2.py3-none-any.whl'
        dl = urlopen('https://pypi.python.org/packages/3.4/s/setuptools/' +
                     min_pkg).read()
        pypath = self._ez_install_pypath = os.path.join(self._tmpdir, min_pkg)
        with open(pypath, 'wb') as pkg:
            pkg.write(dl)

    def clean(self):
        tmpdir = self._tmpdir
        if tmpdir is not None and os.path.isdir(tmpdir):
            shutil.rmtree(tmpdir)

    def remove_develop_eggs(self):
        """This saves a lot of potential trouble."""
        # TODO read it from buildout and remove if below buildout_dir
        develop_eggs = os.path.join(self.buildout_dir, 'develop_eggs')
        if os.path.isdir(develop_eggs):
            shutil.rmtree(develop_eggs)

    def init_python_info(self, python):
        """Used the passed executable path to get version and absolute path.

        The x.y version string of the target Python."""
        cmd = [python, '-c',
               'import sys; print(sys.version); print(sys.executable)']

        try:  # no check_output in Python 2.6
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        except OSError:
            self.error("No wished Python %r executable on PATH" % python)
        out = p.communicate()[0]
        if p.returncode != 0:
            self.error("The target Python executable has errors, "
                       "command was " + ' '.join(cmd))

        lines = out.decode().splitlines()
        self.python_version = '.'.join(lines[0].split()[0].split('.', 2)[:2])
        self.python = lines[-1]

    def init_internal_attrs(self):
        self._tmpdir = None  # for cleanups
        self._pyversion = None  # for property
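
Taken together, init_env and ensure_req boil down to one pkg_resources idiom:
scan a single eggs directory with an Environment and resolve against an empty
WorkingSet so that nothing from sys.path leaks in. A minimal standalone sketch
of that idiom (the './eggs' path and the requirement string are illustrative
placeholders, not values from the original code):

from pkg_resources import Environment, Requirement, WorkingSet

eggs_dir = './eggs'
req = Requirement.parse('zc.buildout==2.13.4')

ws = WorkingSet(entries=())                # empty: ignore sys.path entirely
env = Environment(search_path=[eggs_dir])  # scan only the local eggs dir

dist = env.best_match(req, ws)             # None if nothing in eggs_dir matches
if dist is None:
    print("%s not available in %s, would need to download it" % (req, eggs_dir))
else:
    print("%s fulfilled at %s" % (req, dist.location))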
Example #28
0
    def init_env(self):
        self.ws = WorkingSet(entries=())
        self.env = Environment(search_path=[self.eggs_dir])
Example #29
0
# -*- coding: utf-8 -*-

from base64 import b64encode
from couchdb.http import ResourceConflict
from json import dumps
from libnacl.secret import SecretBox
from logging import getLogger
from openprocurement.api.utils import context_unpack, json_view, APIResource
from pyramid.security import Allow
from pkg_resources import Environment
from itertools import chain

PKG_ENV = Environment()
PKG_VERSIONS = dict(
    chain.from_iterable([[(x.project_name, x.version) for x in PKG_ENV[i]]
                         for i in PKG_ENV]))

LOGGER = getLogger(__package__)


class Root(object):
    __name__ = None
    __parent__ = None
    __acl__ = [
        (Allow, 'g:archivarius', 'dump_resource'),
        (Allow, 'g:archivarius', 'delete_resource'),
    ]

    def __init__(self, request):
        self.request = request
        self.db = request.registry.db
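
The PKG_VERSIONS one-liner near the top of this example is dense; the
following spelled-out sketch (an illustration only, not part of the original
module) does the same thing: Environment() with no arguments scans sys.path,
iterating it yields lowercased project keys, and indexing it yields the
distributions found for each key, so the dict ends up mapping project names
to installed versions.

from pkg_resources import Environment

pkg_env = Environment()          # no search_path given: scans sys.path
pkg_versions = {}
for key in pkg_env:              # keys are lowercased project names
    for dist in pkg_env[key]:    # distributions found for that project
        pkg_versions[dist.project_name] = dist.version

print(pkg_versions.get('setuptools'))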
Example #30
0
    def fetch_dist(self, requirement):
        """Fetch an egg needed for building.
        Use pip/wheel to fetch/build a wheel."""
        get_distribution("pip")
        get_distribution("wheel")
        # Ignore environment markers; if supplied, it is required.
        requirement = Requirement.parse(str(requirement))
        requirement.marker = None
        # Take easy_install options into account, but do not override relevant
        # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll
        # take precedence.
        if "PIP_QUIET" in environ or "PIP_VERBOSE" in environ:
            quiet = False
        else:
            quiet = True
        index_url = None
        find_links = []
        environment = Environment()
        for dist in find_distributions(self.dist_dir):
            if dist in requirement and environment.can_add(dist):
                return dist
        with TemporaryDirectory() as tmpdir:
            cmd = [
                executable,
                "-m",
                "pip",
                "--disable-pip-version-check",
                "wheel",
                "--no-deps",
                "-w",
                tmpdir,
            ]
            if quiet:
                cmd.append("--quiet")
            if "PIP_NO_CACHE_DIR" not in environ and self.no_cache_dir:
                cmd.append("--no-cache-dir")
            if index_url is not None:
                cmd.extend(("--index-url", index_url))
            if find_links is not None:
                for link in find_links:
                    cmd.extend(("--find-links", link))
            # If requirement is a PEP 508 direct URL, directly pass
            # the URL to pip, as `req @ url` does not work on the
            # command line.
            if requirement.url:
                cmd.append(requirement.url)
            else:
                cmd.append(str(requirement))
            check_call(cmd)
            with ZipFile(glob(path.join(tmpdir, "*.whl"))[0], "r") as zf:
                zf.extractall(self.dist_dir)

            pattern = re_compile(
                translate(
                    f'{requirement.project_name.replace("-","_")}-*.dist-info'
                ),
                IGNORECASE,
            )
            dist_path = [
                path.join(self.dist_dir, x) for x in listdir(self.dist_dir) if
                path.isdir(path.join(self.dist_dir, x)) and match(pattern, x)
            ][0]

            root = path.dirname(dist_path)
            return Distribution.from_location(
                root,
                path.basename(dist_path),
                PathMetadata(root, dist_path),
                precedence=BINARY_DIST,
            )
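
The closing lines of fetch_dist use the usual pkg_resources recipe for turning
an unpacked wheel into a Distribution: point PathMetadata at the .dist-info
directory and its parent, then ask Distribution.from_location for an object
with BINARY_DIST precedence. A hedged standalone sketch of that recipe (the
dist-info path and the requirement string are made up for illustration):

import os.path
from pkg_resources import (BINARY_DIST, Distribution, PathMetadata,
                           Requirement)

dist_info = './dist_dir/requests-2.31.0.dist-info'
root = os.path.dirname(dist_info)

dist = Distribution.from_location(
    root,
    os.path.basename(dist_info),
    PathMetadata(root, dist_info),
    precedence=BINARY_DIST,
)

# `dist in req` checks both the project name and the version specifier,
# which is how fetch_dist decides whether an already-extracted wheel
# satisfies the requirement.
print(dist in Requirement.parse('requests>=2,<3'))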
Example #31
0
    def resolve(
        self,
        requirements: Sequence[Requirement],
        env: Optional[Environment] = None,
        installer: Optional[Callable[[str], Distribution]] = None,
        replace_conflicting: Optional[bool] = False,
        extras: Optional[List[str]] = None,
    ) -> List[Distribution]:
        """List all distributions needed to (recursively) meet `requirements`
        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.
        Unless `replace_conflicting=True`, raises a VersionConflict exception
        if
        any requirements are found on the path that have the correct name but
        the wrong version.  Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.
        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement.  Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        resolved = []

        requirement_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            requirement = requirements.pop(0)
            if requirement in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not requirement_extras.markers_pass(requirement, extras):
                continue

            dist = best.get(requirement.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(requirement.key)
                if dist is None or (dist not in requirement and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[requirement.key] = env.best_match(
                        requirement,
                        ws,
                        installer,
                        replace_conflicting=replace_conflicting,
                    )
                    if dist is None:
                        requirers = required_by.get(requirement, None)
                        raise DistributionNotFound(requirement, requirers)
                resolved.append(dist)

            if dist not in requirement:
                # Oops, the "best" so far conflicts with a dependency
                dependent_requirement = required_by[requirement]
                raise VersionConflict(dist, requirement).with_context(
                    dependent_requirement
                )

            # push the new requirements onto the stack
            new_requirements = [
                requirement
                for requirement in dist.requires(requirement.extras)[::-1]
                if requirement.key not in self.excludes
            ]
            requirements.extend(new_requirements)

            # Register the new requirements needed by requirement
            for new_requirement in new_requirements:
                required_by[new_requirement].add(requirement.project_name)
                requirement_extras[new_requirement] = requirement.extras

            processed[requirement] = True

        # return list of distros to activate
        return resolved
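
As a usage illustration only (driving the stock pkg_resources WorkingSet
rather than the subclass above, and with an arbitrary requirement string),
resolve() takes parsed Requirement objects, returns every Distribution needed
to satisfy them transitively, and raises DistributionNotFound or
VersionConflict when it cannot:

from pkg_resources import (DistributionNotFound, Environment, Requirement,
                           VersionConflict, WorkingSet)

ws = WorkingSet()    # defaults to the entries of sys.path
env = Environment()  # likewise scans sys.path

try:
    dists = ws.resolve([Requirement.parse('requests>=2')], env=env)
except DistributionNotFound as exc:
    print('missing:', exc.req)
except VersionConflict as exc:
    print('conflict:', exc.report())
else:
    for dist in dists:
        print(dist.project_name, dist.version, dist.location)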