def find_egg_entry_point(self, object_type, name=None):
    """Return the ``(entry_point, protocol, name)`` tuple for the entry
    point with the given ``name``.

    ``name`` defaults to ``'main'``.  Raises ``LookupError`` when no
    entry point (or more than one) is found in the egg ``self.spec``.
    """
    if name is None:
        name = 'main'
    possible = []
    for protocol_options in object_type.egg_protocols:
        for protocol in protocol_options:
            pkg_resources.require(self.spec)
            entry = pkg_resources.get_entry_info(
                self.spec, protocol, name)
            if entry is not None:
                possible.append((entry.load(), protocol, entry.name))
                break
    if not possible:
        # Build a detailed error.  Fix: enumerate *all* protocol groups
        # here instead of relying on ``protocol_options``, which leaked
        # from the loop above and only held the last group.
        dist = pkg_resources.get_distribution(self.spec)
        raise LookupError(
            "Entry point %r not found in egg %r (dir: %s; protocols: %s; "
            "entry_points: %s)"
            % (name, self.spec, dist.location,
               ', '.join(_flatten(object_type.egg_protocols)),
               ', '.join(_flatten([
                   list((pkg_resources.get_entry_info(self.spec, prot, name)
                         or {}).keys())
                   for prots in object_type.egg_protocols
                   for prot in prots] or '(no entry points)'))))
    if len(possible) > 1:
        raise LookupError(
            "Ambiguous entry points for %r in egg %r (protocols: %s)"
            % (name, self.spec,
               ', '.join(_flatten(object_type.egg_protocols))))
    return possible[0]
def _get_platform_info(name):
    # Resolve the platform class registered under ``name``; unknown
    # platform names fall back to the generic POSIX implementation.
    entry = get_entry_info('bfg9000', 'bfg9000.platforms', name)
    if entry is None:
        entry = get_entry_info('bfg9000', 'bfg9000.platforms', 'posix')
    platform_class = entry.load()
    return platform_class(name)
def find_egg_entry_point(self, object_type, name=None):
    """Return the ``(entry_point, protocol, name)`` tuple for the entry
    point with the given ``name``.

    ``name`` defaults to ``'main'``.  Raises ``LookupError`` when no
    entry point (or more than one) is found in the egg ``self.spec``.
    """
    if name is None:
        name = 'main'
    possible = []
    for protocol_options in object_type.egg_protocols:
        for protocol in protocol_options:
            pkg_resources.require(self.spec)
            entry = pkg_resources.get_entry_info(
                self.spec, protocol, name)
            if entry is not None:
                possible.append((entry.load(), protocol, entry.name))
                break
    if not possible:
        # Build a detailed error.  Fix: enumerate *all* protocol groups
        # here instead of relying on ``protocol_options``, which leaked
        # from the loop above and only held the last group.
        dist = pkg_resources.get_distribution(self.spec)
        raise LookupError(
            "Entry point %r not found in egg %r (dir: %s; protocols: %s; "
            "entry_points: %s)"
            % (name, self.spec, dist.location,
               ', '.join(_flatten(object_type.egg_protocols)),
               ', '.join(_flatten([
                   dictkeys(pkg_resources.get_entry_info(self.spec, prot,
                                                         name) or {})
                   for prots in object_type.egg_protocols
                   for prot in prots] or '(no entry points)'))))
    if len(possible) > 1:
        raise LookupError(
            "Ambiguous entry points for %r in egg %r (protocols: %s)"
            % (name, self.spec,
               ', '.join(_flatten(object_type.egg_protocols))))
    return possible[0]
def _get_platform_info(kind, genus, species, arch):
    # Platform classes are registered per-kind in their own entry-point
    # group; unrecognized genus names fall back to the POSIX entry.
    group = 'bfg9000.platforms.{}'.format(kind)
    entry = get_entry_info('bfg9000', group, genus)
    if entry is None:
        entry = get_entry_info('bfg9000', group, 'posix')
    platform_class = entry.load()
    return platform_class(genus, species, arch)
def import_packaged_module(module_dist, module_object):
    """Load a packaged GeoNature module.

    Builds the module's config dict, validates it against the module's
    declared schema when one exists, and loads its blueprint.

    Returns a ``(module_object, module_config, module_blueprint)``
    tuple; ``module_blueprint`` is ``None`` when the module declares no
    ``blueprint`` entry point.
    """
    module_code = module_object.module_code
    module_dir = GN_EXTERNAL_MODULE / module_object.module_path
    frontend_path = os.environ.get(f'GEONATURE_{module_code}_FRONTEND_PATH',
                                   str(module_dir / 'frontend'))
    module_config = {
        'MODULE_CODE': module_code,
        'MODULE_URL': '/' + module_object.module_path,
        'FRONTEND_PATH': frontend_path,
    }
    try:
        module_schema = load_entry_point(module_dist, 'gn_module',
                                         'config_schema')
    except ImportError:
        # Module declares no config schema; keep the base config.
        pass
    else:
        # Consistency fix: use the ``module_code`` local (same value as
        # module_object.module_code) as done for the frontend path above.
        config_path = os.environ.get(f'GEONATURE_{module_code}_CONFIG_FILE')
        if not config_path:
            # fallback to legacy conf path guessing
            config_path = str(module_dir / 'config/conf_gn_module.toml')
        module_config.update(load_and_validate_toml(config_path,
                                                    module_schema))
    blueprint_entry_point = get_entry_info(module_dist, 'gn_module',
                                           'blueprint')
    if blueprint_entry_point:
        module_blueprint = blueprint_entry_point.load()
        # Expose the computed config on the blueprint for the module.
        module_blueprint.config = module_config
    else:
        module_blueprint = None
    return (module_object, module_config, module_blueprint)
def help(args, parser):
    # Print help for a single command (when ``args`` is non-empty) or
    # the general usage plus command listing.  Always exits the process.
    if args:
        # TODO: First check for static help text file before
        # generating it at run time.
        ep = pkg_resources.get_entry_info("rbtools", "rbtools_commands",
                                          args[0])
        if ep:
            help_text = build_help_text(ep.load())
            print help_text
            sys.exit(0)
        # Unknown command name: report and exit (still success status).
        print "No help found for %s" % args[0]
        sys.exit(0)
    parser.print_help()
    # TODO: For now we'll print every command found with an entry
    # point. In the future this needs to be switched to printing
    # a hard-coded list, so that we can include commands we create
    # using shell scripts etc.
    commands = pkg_resources.get_entry_map('rbtools', 'rbtools_commands')
    print "\nThe most commonly used commands are:"
    for command in commands:
        print " %s" % command
    print ("See '%s help <command>' for more information "
           "on a specific command." % RB_MAIN)
    sys.exit(0)
def help(args, parser): if args: # TODO: First check for static help text file before # generating it at run time. ep = pkg_resources.get_entry_info("rbtools", "rbtools_commands", args[0]) if ep: help_text = build_help_text(ep.load()) print help_text sys.exit(0) print "No help found for %s" % args[0] sys.exit(0) parser.print_help() commands = pkg_resources.get_entry_map('rbtools', 'rbtools_commands') common_commands = ['post', 'patch', 'close', 'diff'] print "\nThe most commonly used commands are:" for command in common_commands: print " %s" % command print "\nOther commands:" for command in sorted(commands): if command not in common_commands: print " %s" % command print ("See '%s help <command>' for more information " "on a specific command." % RB_MAIN) sys.exit(0)
def _fetch():
    """Populate the module-level ``_distributions`` and ``_blueprints``
    caches from pkg_resources.

    All blueprints are loaded exactly once, and all of them before any
    is registered, so every module is imported before any
    @bp.register-decorated method runs.
    """
    global _blueprints
    global _distributions
    if not _distributions:
        _distributions = {}
        for dist in pkg_resources.WorkingSet():
            dist.relengapi_metadata = {}
            _distributions[dist.key] = dist
    if not _blueprints:
        _blueprints = []
        # both the legacy and the dotted entry-point group names are
        # scanned
        entry_points = []
        for group in ('relengapi_blueprints', 'relengapi.blueprints'):
            entry_points.extend(pkg_resources.iter_entry_points(group))
        for ep in entry_points:
            bp = ep.load()
            # make sure we have only one copy of each Distribution
            bp.dist = _distributions[ep.dist.key]
            _blueprints.append(bp)
        # look for relengapi metadata for every dist containing a blueprint
        dists_with_blueprints = {bp.dist.key: bp.dist
                                 for bp in _blueprints}.values()
        for dist in dists_with_blueprints:
            meta_ep = pkg_resources.get_entry_info(
                dist, 'relengapi.metadata', dist.key)
            if not meta_ep:
                continue
            dist.relengapi_metadata = meta_ep.load()
def _fetch():
    # get blueprints, dists, and so on from pkg_resources.
    #
    # We're careful to load all of the blueprints exactly once and before
    # registering any of them, as this ensures everything is imported before
    # any of the @bp.register-decorated methods are called
    global _blueprints
    global _distributions
    if not _distributions:
        _distributions = {}
        for dist in pkg_resources.WorkingSet():
            # default; filled in below for dists that carry a blueprint
            dist.relengapi_metadata = {}
            _distributions[dist.key] = dist
    if not _blueprints:
        _blueprints = []
        # scan both the legacy and the dotted entry-point group names
        entry_points = (list(pkg_resources.iter_entry_points('relengapi_blueprints'))
                        + list(pkg_resources.iter_entry_points('relengapi.blueprints')))
        for ep in entry_points:
            bp = ep.load()
            # make sure we have only one copy of each Distribution
            bp.dist = _distributions[ep.dist.key]
            _blueprints.append(bp)
        # look for relengapi metadata for every dist containing a blueprint
        blueprint_dists = {bp.dist.key: bp.dist for bp in _blueprints}.values()
        for dist in blueprint_dists:
            ep = pkg_resources.get_entry_info(dist, 'relengapi.metadata', dist.key)
            if not ep:
                continue
            dist.relengapi_metadata = ep.load()
def setup_pipeline(self, app, global_conf=None):
    """Wrap ``app`` with every filter configured in ``self.pipeline``
    and return the resulting WSGI application."""
    for spec, protocol, name, extra in self.pipeline:
        if protocol == 'factory':
            # ``spec`` is already a callable factory, not an
            # entry-point specification.
            app = spec(app, **extra)
        else:
            entrypoint = pkg_resources.get_entry_info(spec, protocol, name)
            factory = entrypoint.load()
            app = factory(app, global_conf, **extra)
    return app
def main():
    """Execute a command."""
    # Exit with the conventional 128+signal status on Ctrl-C.
    def exit_on_int(sig, frame):
        sys.exit(128 + sig)
    signal.signal(signal.SIGINT, exit_on_int)

    parser = argparse.ArgumentParser(
        prog=RB_MAIN,
        usage='%(prog)s [--version] <command> [options] [<args>]',
        add_help=False)
    for option in GLOBAL_OPTIONS:
        option.add_to(parser)
    opt = parser.parse_args()

    # No command at all: show the general help (help() exits).
    if not opt.command:
        help([], parser)

    command_name = opt.command[0]
    args = opt.command[1:]
    if command_name == "help":
        help(args, parser)
    elif opt.help or "--help" in args or '-h' in args:
        help(opt.command, parser)

    # Attempt to retrieve the command class from the entry points. We
    # first look in rbtools for the commands, and failing that, we look
    # for third-party commands.
    ep = pkg_resources.get_entry_info("rbtools", "rbtools_commands",
                                      command_name)
    if not ep:
        try:
            # Python 2 iterator protocol; take the first match.
            ep = pkg_resources.iter_entry_points('rbtools_commands',
                                                 command_name).next()
        except StopIteration:
            # There aren't any custom entry points defined.
            pass
    if ep:
        try:
            command = ep.load()()
        except ImportError:
            # TODO: It might be useful to actual have the strack
            # trace here, due to an import somewhere down the import
            # chain failing.
            sys.stderr.write("Could not load command entry point %s\n"
                             % ep.name)
            sys.exit(1)
        except Exception, e:
            sys.stderr.write("Unexpected error loading command %s: %s\n"
                             % (ep.name, e))
            sys.exit(1)
        command.run_from_argv([RB_MAIN, command_name] + args)
def main():
    """Execute a command."""
    # Translate SIGINT into the conventional 128+signal exit status.
    def exit_on_int(sig, frame):
        sys.exit(128 + sig)
    signal.signal(signal.SIGINT, exit_on_int)

    parser = argparse.ArgumentParser(
        prog=RB_MAIN,
        usage='%(prog)s [--version] <command> [options] [<args>]',
        add_help=False)
    for option in GLOBAL_OPTIONS:
        option.add_to(parser)
    opt = parser.parse_args()

    # help() exits the process in every branch below.
    if not opt.command:
        help([], parser)

    command_name = opt.command[0]
    args = opt.command[1:]
    if command_name == "help":
        help(args, parser)
    elif opt.help or "--help" in args or '-h' in args:
        help(opt.command, parser)

    # Attempt to retrieve the command class from the entry points. We
    # first look in rbtools for the commands, and failing that, we look
    # for third-party commands.
    ep = pkg_resources.get_entry_info("rbtools", "rbtools_commands",
                                      command_name)
    if not ep:
        try:
            # Python 2 iterator protocol; take the first match.
            ep = pkg_resources.iter_entry_points('rbtools_commands',
                                                 command_name).next()
        except StopIteration:
            # There aren't any custom entry points defined.
            pass
    if ep:
        try:
            command = ep.load()()
        except ImportError:
            # TODO: It might be useful to actual have the strack
            # trace here, due to an import somewhere down the import
            # chain failing.
            sys.stderr.write("Could not load command entry point %s\n"
                             % ep.name)
            sys.exit(1)
        except Exception, e:
            sys.stderr.write("Unexpected error loading command %s: %s\n"
                             % (ep.name, e))
            sys.exit(1)
        command.run_from_argv([RB_MAIN, command_name] + args)
def make_wsgi_app(self, global_config):
    """Build the base WSGI application from ``self.config`` and wrap it
    with each filter configured in ``self.pipeline``."""
    app = self.config.make_wsgi_app()
    for spec, protocol, name, extra in self.pipeline:
        if protocol == 'factory':
            # ``spec`` is a ready-made callable factory.
            app = spec(app, **extra)
            continue
        filter_factory = pkg_resources.get_entry_info(
            spec, protocol, name).load()
        app = filter_factory(app, global_config, **extra)
    return app
def test_plugins(group, name, cls, dist='custodia'):
    """Assert that entry point ``group``/``name`` in ``dist`` exists,
    belongs to ``dist``, and resolves to exactly ``cls``."""
    ep = pkg_resources.get_entry_info(dist, group, name)
    assert ep is not None
    assert ep.dist.project_name == dist
    # Newer setuptools spells dependency-free loading ``resolve()``;
    # older releases use ``load(require=False)``.
    resolver = getattr(ep, 'resolve', None)
    if resolver is not None:
        resolved = resolver()
    else:
        resolved = ep.load(require=False)
    assert resolved is cls
def __getitem__(self, key):
    """Return the value for ``key``, lazily registering an extension
    class on first access when a matching entry point exists."""
    if key in self:
        return super().__getitem__(key)
    info = get_entry_info(__package__, f'{__package__}_extensions', key)
    if info is not None:
        # Defer the actual entry-point load until the class is used.
        class LazyClass(metaclass=LazyMeta, info=info):
            pass
        self[key] = LazyClass
    return super().__getitem__(key)
def make_wsgi_app(self):
    """Start Zope and wrap its publisher with the configured middleware
    pipeline (applied in reverse order, so the first entry is
    outermost)."""
    self._wsgi_conf_dir = tempfile.mkdtemp()
    global_config = {'here': self._wsgi_conf_dir}
    zope_conf = self._get_zope_conf(self._wsgi_conf_dir)
    Zope2.Startup.run.make_wsgi_app(global_config, zope_conf)
    app = ZPublisher.WSGIPublisher.publish_module
    for spec, protocol, name, extra in reversed(self.pipeline):
        factory = pkg_resources.get_entry_info(spec, protocol, name).load()
        app = factory(app, global_config, **extra)
    return app
def make_wsgi_app(self):
    # Boot Zope with a temporary config dir, then wrap the publisher
    # with the configured middleware pipeline.  Iterating in reverse
    # makes the first pipeline entry the outermost filter.
    self._wsgi_conf_dir = tempfile.mkdtemp()
    global_config = {'here': self._wsgi_conf_dir}
    zope_conf = self._get_zope_conf(self._wsgi_conf_dir)
    Zope2.Startup.run.make_wsgi_app(global_config, zope_conf)
    app = ZPublisher.WSGIPublisher.publish_module
    for spec, protocol, name, extra in reversed(self.pipeline):
        entrypoint = pkg_resources.get_entry_info(spec, protocol, name)
        app = entrypoint.load()(app, global_config, **extra)
    return app
def _check_extra_requires(entry_point_type, entry_point_name):
    """Check if extra requirements are installed.

    Raises ``ValueError`` when the entry point does not exist; lets
    ``require()`` raise when an extra dependency is missing.
    """
    entry_point = pkg_resources.get_entry_info(
        'qiskit_aqua_interfaces', entry_point_type, entry_point_name)
    if entry_point is None:
        raise ValueError("Entry Point not found: '{}' '{}'.".format(
            entry_point_type, entry_point_name))
    # make sure that all extras are installed
    entry_point.require()
def run(self):
    # Aggregate widget resources into self.output, optionally running
    # them through YUICompressor, honoring per-distribution filters.
    if not self.output:
        print >> sys.stderr, "Need to specify an output directory"
        return
    if not self.distributions:
        print >> sys.stderr, "Need to specify at least one distribution"
        return
    if not os.path.exists(self.output):
        self.announce("Creating output dir %s" % self.output)
        os.mkdir(self.output)
    if self.compresslevel > 0 and not os.path.exists(self.yuicompressor):
        print >> sys.stderr, "Could not find YUICompressor at " + \
            self.yuicompressor
        return
    tempdir = tempfile.mktemp()
    self.execute(os.makedirs, (tempdir, ),
                 "Creating temp dir %s" % tempdir)
    # Pick the writer implementation based on compression settings.
    if self.compresslevel > 0:
        if self.onepass:
            self.writer = OnePassCompressingWriter(self, tempdir)
        else:
            self.writer = CompressingWriter(self, tempdir)
    else:
        self.writer = FileWriter(self, tempdir)

    # Default filter accepts everything.
    def resource_filter(resource):
        return True
    # try & locate a resource-filter for
    # the listed distributions.
    # all the found filters are combined,
    # and for a resource to end up in the
    # aggregation, they all must return True
    for distribution_name in self.distributions:
        ep = pkg_resources.get_entry_info(distribution_name,
                                          "toscawidgets.widgets",
                                          "resource_aggregation_filter")
        if ep is not None:
            def combine_predicates(a, b):
                def resource_filter(resource):
                    return a(resource) and b(resource)
                return resource_filter
            resource_filter = combine_predicates(resource_filter, ep.load())
    self.execute(self._collect_variant_resources, (resource_filter, ),
                 "Collection resources for variant %s" % self.variant)
def test_cli_help():
    """``lowatt-enedis --help`` exits with status 0 and prints a usage
    message on stdout."""
    ep = pkg_resources.get_entry_info('lowatt_enedis', 'console_scripts',
                                      'lowatt-enedis')
    cli = ep.load()
    captured = io.StringIO()
    with pytest.raises(SystemExit) as cm, \
            override_sys_argv(['lowatt-enedis', '--help']), \
            contextlib.redirect_stdout(captured):
        cli()
    assert cm.value.code == 0
    assert captured.getvalue().startswith('usage: ')
def _entry_script(self, path): f = open(path, "r") lines = [f.readline(), f.readline()] del f eicomment = "# EASY-INSTALL-ENTRY-SCRIPT: " for line in lines: if line.startswith(eicomment): values = [x.strip("'\"") for x in line[len(eicomment) :].strip().split(",")] print path, "is an easy install entry script. running pkg_resources.require(%r)" % (values[0],) pkg_resources.require(values[0]) ep = pkg_resources.get_entry_info(*values) print "entry point is", ep return ep.module_name return None
def test_cli_help():
    """Running ``lowatt-enedis --help`` should exit successfully and
    emit a usage message."""
    entrypoint = pkg_resources.get_entry_info(
        "lowatt_enedis", "console_scripts", "lowatt-enedis")
    func = entrypoint.load()
    out = io.StringIO()
    with pytest.raises(SystemExit) as excinfo:
        with override_sys_argv(["lowatt-enedis", "--help"]):
            with contextlib.redirect_stdout(out):
                func()
    assert excinfo.value.code == 0
    assert out.getvalue().startswith("usage: ")
def test_copy_resource_tree():
    """copy_resource_tree should copy the test module's frontend files
    into the destination directory."""
    dest = "/tmp/isomer-test/copy_resource_test"
    os.makedirs(dest, exist_ok=True)
    from pkg_resources import get_entry_info
    pkg_object = get_entry_info(
        "isomer-test-module", "isomer.components", "testmanager")
    copy_resource_tree("isomer-test-module", "frontend", dest)
    assert os.path.exists(os.path.join(dest, "test.module.js"))
class Constant(object):
    # Resolved at import time from the installed ``repo-checker``
    # distribution's ``checker`` console-script entry point; subsequent
    # paths are derived from its installation location.
    own = get_entry_info('repo-checker', 'console_scripts', 'checker')
    base_dir = os.path.join(own.dist.location)
    # Package-relative data directories.
    locale_dir = os.path.join(base_dir, own.module_name, 'locale')
    backend_dir = os.path.join(base_dir, own.module_name, 'backends')
    # Per-user configuration under ~/.repo-checker.
    user_conf_dir = os.path.join(os.path.expanduser('~'), '.repo-checker')
    user_backend_dir = os.path.join(user_conf_dir, 'backends')
    user_conf_file = os.path.join(user_conf_dir, 'checker.ini')
    user_log_file = os.path.join(user_conf_dir, 'checker.log')
    # Worker pool size and retry policy.
    worker_num = 5
    retry = 2
    retry_time = 5
def find_egg_ep(self, obj_type, name=None):
    """Find the entry point ``name`` (default ``'main'``) in the egg
    ``self.spec``.

    Returns a ``(loaded_object, protocol, entry_name)`` tuple; raises
    ``LookupError`` when no entry point (or more than one) matches.
    """
    if name is None:
        name = 'main'
    found = []
    for protocol in obj_type.egg_protocols:
        pkg_resources.require(self.spec)
        entry = pkg_resources.get_entry_info(self.spec, protocol, name)
        if entry is not None:
            found.append((entry.load(), protocol, entry.name))
            break
    if not found:
        raise LookupError("Entry point %r not found in egg %r"
                          % (name, self.spec))
    if len(found) > 1:
        # Fix: the format string had a single %r but was given two
        # arguments, so raising this error itself raised a TypeError.
        raise LookupError("Ambiguous entry for %r in egg %r"
                          % (name, self.spec))
    return found[0]
def Entrypoint(dist, group, name, scripts=None, pathex=None, binaries=None, datas=None, hiddenimports=None, hookspath=None, excludes=None, runtime_hooks=None, cipher=None, win_no_prefer_redirects=False, win_private_assemblies=False): import pkg_resources # get toplevel packages of distribution from metadata def get_toplevel(dist): distribution = pkg_resources.get_distribution(dist) if distribution.has_metadata('top_level.txt'): return list(distribution.get_metadata('top_level.txt').split()) else: return [] hiddenimports = hiddenimports or [] packages = [] for distribution in hiddenimports: packages += get_toplevel(distribution) scripts = scripts or [] pathex = pathex or [] # get the entry point ep = pkg_resources.get_entry_info(dist, group, name) # insert path of the egg at the verify front of the search path pathex = [ep.dist.location] + pathex # script name must not be a valid module name to avoid name clashes on import script_path = os.path.join(workpath, name + '-script.py') print "creating script for entry point", dist, group, name with open(script_path, 'w') as fh: fh.write("import {0}\n".format(ep.module_name)) fh.write("{0}.{1}()\n".format(ep.module_name, '.'.join(ep.attrs))) for package in packages: fh.write("import {0}\n".format(package)) return Analysis([script_path] + scripts, pathex=pathex, binaries=binaries, datas=datas, hiddenimports=hiddenimports, hookspath=hookspath, excludes=excludes, runtime_hooks=runtime_hooks, cipher=cipher, win_no_prefer_redirects=win_no_prefer_redirects, win_private_assemblies=win_private_assemblies )
def _entry_script(self, path): f = open(path, 'r') lines = [f.readline(), f.readline()] del f eicomment = "# EASY-INSTALL-ENTRY-SCRIPT: " for line in lines: if line.startswith(eicomment): values = [ x.strip("'\"") for x in line[len(eicomment):].strip().split(",") ] print path, "is an easy install entry script. running pkg_resources.require(%r)" % ( values[0], ) pkg_resources.require(values[0]) ep = pkg_resources.get_entry_info(*values) print "entry point is", ep return ep.module_name return None
def find_entry_point_for_command(command_name):
    """Return an entry point for the given rbtools command.

    If no entry point is found, None is returned.
    """
    # Prefer the command shipped with rbtools itself; fall back to any
    # third-party command registered under the same group.
    entry_point = pkg_resources.get_entry_info("rbtools", "rbtools_commands",
                                               command_name)
    if entry_point:
        return entry_point
    for candidate in pkg_resources.iter_entry_points("rbtools_commands",
                                                     command_name):
        return candidate
    return None
def main():
    """Execute a command."""
    # Exit with the conventional 128+signal status on Ctrl-C.
    def exit_on_int(sig, frame):
        sys.exit(128 + sig)
    signal.signal(signal.SIGINT, exit_on_int)

    parser = OptionParser(prog=RB_MAIN,
                          usage='%prog [--version] <command> [options]'
                                ' [<args>]',
                          option_list=GLOBAL_OPTIONS,
                          add_help_option=False,
                          version='RBTools %s' % get_version_string())
    parser.disable_interspersed_args()
    opt, args = parser.parse_args()

    # help() exits the process in every branch below.
    if not args:
        help([], parser)

    command_name = args[0]
    if command_name == "help":
        help(args[1:], parser)
    elif opt.help or "--help" in args or '-h' in args:
        help(args, parser)

    # Attempt to retrieve the command class from the entry points.
    ep = pkg_resources.get_entry_info("rbtools", "rbtools_commands", args[0])
    if ep:
        try:
            command = ep.load()()
        except ImportError:
            # TODO: It might be useful to actual have the strack
            # trace here, due to an import somewhere down the import
            # chain failing.
            sys.stderr.write("Could not load command entry point %s\n"
                             % ep.name)
            sys.exit(1)
        except Exception, e:
            sys.stderr.write("Unexpected error loading command %s: %s\n"
                             % (ep.name, e))
            sys.exit(1)
        command.run_from_argv([RB_MAIN] + args)
def take_action(self, parsed_args):
    # Locate the requested entry point — either inside an explicitly
    # named distribution or by scanning the whole group — then attempt
    # to load it and report where it lives plus any load error.
    if parsed_args.distribution:
        LOG.debug(
            'Loading %s from %s using distribution %s',
            parsed_args.name,
            parsed_args.group,
            parsed_args.distribution,
        )
        dist = pkg_resources.get_distribution(parsed_args.distribution)
        ep = pkg_resources.get_entry_info(
            dist,
            parsed_args.group,
            parsed_args.name,
        )
    else:
        LOG.debug(
            'Looking for %s in group %s',
            parsed_args.name,
            parsed_args.group,
        )
        try:
            # Python 2 iterator protocol; take the first match.
            ep = pkg_resources.iter_entry_points(
                parsed_args.group,
                parsed_args.name,
            ).next()
        except StopIteration:
            raise ValueError('Could not find %r in %r' % (
                parsed_args.name,
                parsed_args.group,
            ))
    try:
        ep.load()
    except Exception:
        # Capture the failure for display instead of crashing.
        tb = traceback.format_exception(*sys.exc_info())
    else:
        tb = ''
    return (
        ('Module', 'Member', 'Distribution', 'Path', 'Error'),
        (ep.module_name, '.'.join(ep.attrs), str(ep.dist),
         ep.dist.location, tb),
    )
def run(self):
    # Aggregate widget resources into self.output, optionally running
    # them through YUICompressor, honoring per-distribution filters.
    if not self.output:
        print >> sys.stderr, "Need to specify an output directory"
        return
    if not self.distributions:
        print >> sys.stderr, "Need to specify at least one distribution"
        return
    if not os.path.exists(self.output):
        self.announce("Creating output dir %s" % self.output)
        os.mkdir(self.output)
    if self.compresslevel > 0 and not os.path.exists(self.yuicompressor):
        print >> sys.stderr, "Could not find YUICompressor at " + \
            self.yuicompressor
        return
    tempdir = tempfile.mktemp()
    self.execute(os.makedirs, (tempdir,), "Creating temp dir %s" % tempdir)
    # Pick the writer implementation based on compression settings.
    if self.compresslevel > 0:
        if self.onepass:
            self.writer = OnePassCompressingWriter(self, tempdir)
        else:
            self.writer = CompressingWriter(self, tempdir)
    else:
        self.writer = FileWriter(self, tempdir)

    # Default filter accepts everything.
    def resource_filter(resource):
        return True
    # try & locate a resource-filter for
    # the listed distributions.
    # all the found filters are combined,
    # and for a resource to end up in the
    # aggregation, they all must return True
    for distribution_name in self.distributions:
        ep = pkg_resources.get_entry_info(distribution_name,
                                          "toscawidgets.widgets",
                                          "resource_aggregation_filter")
        if ep is not None:
            def combine_predicates(a, b):
                def resource_filter(resource):
                    return a(resource) and b(resource)
                return resource_filter
            resource_filter = combine_predicates(resource_filter, ep.load())
    self.execute(self._collect_variant_resources, (resource_filter,),
                 "Collection resources for variant %s" % self.variant)
def main():
    """Execute a command."""
    # Translate SIGINT into the conventional 128+signal exit status.
    def exit_on_int(sig, frame):
        sys.exit(128 + sig)
    signal.signal(signal.SIGINT, exit_on_int)

    parser = OptionParser(prog=RB_MAIN,
                          usage='%prog [--version] <command> [options]'
                                ' [<args>]',
                          option_list=GLOBAL_OPTIONS,
                          add_help_option=False,
                          version='RBTools %s' % get_version_string())
    parser.disable_interspersed_args()
    opt, args = parser.parse_args()

    # help() exits the process in every branch below.
    if not args:
        help([], parser)

    command_name = args[0]
    if command_name == "help":
        help(args[1:], parser)
    elif opt.help or "--help" in args or '-h' in args:
        help(args, parser)

    # Attempt to retrieve the command class from the entry points.
    ep = pkg_resources.get_entry_info("rbtools", "rbtools_commands", args[0])
    if ep:
        try:
            command = ep.load()()
        except ImportError:
            # TODO: It might be useful to actual have the strack
            # trace here, due to an import somewhere down the import
            # chain failing.
            sys.stderr.write("Could not load command entry point %s\n"
                             % ep.name)
            sys.exit(1)
        except Exception, e:
            sys.stderr.write("Unexpected error loading command %s: %s\n"
                             % (ep.name, e))
            sys.exit(1)
        command.run_from_argv([RB_MAIN] + args)
def find_entry_point_for_command(command_name):
    """Return an entry point for the given rbtools command.

    If no entry point is found, None is returned.
    """
    entry_point = pkg_resources.get_entry_info('rbtools', 'rbtools_commands',
                                               command_name)
    if not entry_point:
        # Fall back to third-party commands; next()'s default keeps the
        # result None when nothing is registered.
        entry_point = next(
            pkg_resources.iter_entry_points('rbtools_commands',
                                            command_name),
            None)
    return entry_point
def find_egg_ep(self, obj_type, name=None):
    """Find the entry point ``name`` (default ``'main'``) in the egg
    ``self.spec``.

    Returns a ``(loaded_object, protocol, entry_name)`` tuple; raises
    ``LookupError`` when no entry point (or more than one) matches.
    """
    if name is None:
        name = 'main'
    found = []
    for protocol in obj_type.egg_protocols:
        pkg_resources.require(self.spec)
        entry = pkg_resources.get_entry_info(
            self.spec, protocol, name)
        if entry is not None:
            found.append((entry.load(), protocol, entry.name))
            break
    if not found:
        raise LookupError(
            "Entry point %r not found in egg %r" % (name, self.spec))
    if len(found) > 1:
        # Fix: the format string had a single %r but was given two
        # arguments, so raising this error itself raised a TypeError.
        raise LookupError(
            "Ambiguous entry for %r in egg %r" % (name, self.spec))
    return found[0]
def take_action(self, parsed_args):
    # Find the entry point in a specific distribution when one is
    # given, otherwise in the first distribution providing it; then
    # try loading it and return a row describing module, member,
    # distribution, path and any load-time traceback.
    if parsed_args.distribution:
        LOG.debug(
            'Loading %s from %s using distribution %s',
            parsed_args.name,
            parsed_args.group,
            parsed_args.distribution,
        )
        dist = pkg_resources.get_distribution(parsed_args.distribution)
        ep = pkg_resources.get_entry_info(
            dist,
            parsed_args.group,
            parsed_args.name,
        )
    else:
        LOG.debug(
            'Looking for %s in group %s',
            parsed_args.name,
            parsed_args.group,
        )
        try:
            # Python 2 iterator protocol; take the first match.
            ep = pkg_resources.iter_entry_points(
                parsed_args.group,
                parsed_args.name,
            ).next()
        except StopIteration:
            raise ValueError('Could not find %r in %r' % (
                parsed_args.name,
                parsed_args.group,
            ))
    try:
        ep.load()
    except Exception:
        # Report the failure in the output row instead of crashing.
        tb = traceback.format_exception(*sys.exc_info())
    else:
        tb = ''
    return (
        ('Module', 'Member', 'Distribution', 'Path', 'Error'),
        (ep.module_name, '.'.join(ep.attrs), str(ep.dist),
         ep.dist.location, tb),
    )
def help(args, parser):
    # Print help for one command, or the overall usage plus command
    # listing; always terminates the process.
    if args:
        # TODO: First check for static help text file before
        # generating it at run time.
        ep = pkg_resources.get_entry_info("rbtools", "rbtools_commands",
                                          args[0])
        if ep:
            help_text = build_help_text(ep.load())
            print help_text
            sys.exit(0)
        print "No help found for %s" % args[0]
        sys.exit(0)
    parser.print_help()
    # We cast to a set to de-dupe the list, since third-parties may
    # try to override commands by using the same name, and then cast
    # back to a list for easy sorting.
    entrypoints = pkg_resources.iter_entry_points('rbtools_commands')
    commands = list(set([entrypoint.name for entrypoint in entrypoints]))
    common_commands = ['post', 'patch', 'close', 'diff']
    print "\nThe most commonly used commands are:"
    for command in common_commands:
        print " %s" % command
    print "\nOther commands:"
    for command in sorted(commands):
        if command not in common_commands:
            print " %s" % command
    print(
        "See '%s help <command>' for more information "
        "on a specific command." % RB_MAIN)
    sys.exit(0)
def _get_command(command_name):
    """Return the command class for a given name.

    Args:
        command_name (unicode): The name of the command.

    Returns:
        rbpkg.commands.base.BaseCommand:
        The command class, or ``None`` if a command could not be found.
    """
    ep = pkg_resources.get_entry_info('rbpkg', 'rbpkg_commands',
                                      command_name)
    if not ep:
        return None
    try:
        return ep.load()()
    except Exception:
        # Any failure while importing/instantiating the command is an
        # internal error: log it and abort the process.
        logger.exception(
            'There was an internal error loading the command "%s".',
            ep.name)
        sys.exit(1)
def help(args, parser): if args: # TODO: First check for static help text file before # generating it at run time. ep = pkg_resources.get_entry_info("rbtools", "rbtools_commands", args[0]) if ep: help_text = build_help_text(ep.load()) print help_text sys.exit(0) print "No help found for %s" % args[0] sys.exit(0) parser.print_help() # We cast to a set to de-dupe the list, since third-parties may # try to override commands by using the same name, and then cast # back to a list for easy sorting. entrypoints = pkg_resources.iter_entry_points('rbtools_commands') commands = list(set([ep.name for ep in entrypoints])) common_commands = ['post', 'patch', 'close', 'diff'] print "\nThe most commonly used commands are:" for command in common_commands: print " %s" % command print "\nOther commands:" for command in sorted(commands): if command not in common_commands: print " %s" % command print ("See '%s help <command>' for more information " "on a specific command." % RB_MAIN) sys.exit(0)
def test_plugins(group, name, cls, dist='custodia.ipa'):
    """Check that entry point ``group``/``name`` in ``dist`` belongs to
    that distribution and resolves to exactly ``cls``."""
    ep = pkg_resources.get_entry_info(dist, group, name)
    # Fix: fail with a clear assertion when the entry point is missing
    # instead of an AttributeError on None below.
    assert ep is not None
    assert ep.dist.project_name == dist
    assert ep.resolve() is cls
def initialize():
    # Initialize all module-level configuration globals and bring up the
    # supporting subsystems (config file, database, logging, subliminal).
    global VIDEOPATHS, FALLBACKTOENG, SUBENG, LOGFILE, SUBNL, LOGLEVEL, SKIPHIDDENDIRS, \
        SUBNL, LOGLEVEL, LOGLEVELCONSOLE, LOGSIZE, LOGNUM, LOGHTTPACCESS, LOGREVERSED, SKIPSHOW, SKIPSHOWUPPER, \
        USERNAMEMAPPING, USERNAMEMAPPINGUPPER, NAMEMAPPING, NAMEMAPPINGUPPER, \
        SHOWID_CACHE, POSTPROCESSCMD, CONFIGFILE, WORKDIR, NOTIFYEN, NOTIFYNL, \
        MINMATCHSCORE, MINMATCHSCOREDEFAULT, MATCHQUALITY, MATCHCODEC, MATCHRELEASEGROUP, \
        MAILSRV, MAILFROMADDR, MAILTOADDR, MAILUSERNAME, CONFIGVERSION, CONFIGUPGRADED, \
        MAILPASSWORD, MAILSUBJECT, MAILENCRYPTION, \
        GROWLHOST, GROWLPORT, GROWLPASS, \
        TWITTERKEY, TWITTERSECRET, NMAAPI, NOTIFYMAIL, NOTIFYGROWL, NOTIFYTWITTER, NOTIFYNMA, \
        WANTEDQUEUE, \
        APIKEY, API, TIMEOUT, APICALLS, \
        APICALLSLASTRESET, APICALLSRESETINT, APICALLSMAX, \
        SCHEDULERSCANDISK, SCHEDULERCHECKSUB, SCHEDULERDOWNLOADSUBS, \
        DAEMON, NOTIFYPROWL, PROWLAPI, PROWLPRIORITY, PUSHALOTAPI, NOTIFYPUSHALOT, \
        SUBLIMINALPROVIDERSENTRYPOINT, SUBLIMINALPROVIDERS, SUBLIMINALPROVIDERLIST, SUBLIMINALCACHEFILE, DOGPILECACHEFILE, \
        DBFILE, MOBILEUSERAGENTS, MOBILEAUTOSUB, \
        USERAGENT, VERSIONURL, \
        STARTED, PID

    # Database file name (relative to the working directory).
    DBFILE = 'database.db'

    # Subliminal providers entry point and cache files.
    SUBLIMINALPROVIDERSENTRYPOINT = pkg_resources.get_entry_info(dist='subliminal', group=None, name='subliminal.providers')
    SUBLIMINALCACHEFILE = 'subliminal.cache.dbm'
    DOGPILECACHEFILE = 'dogpile.cache.dbm'

    # Version / user-agent strings derived from version.RELEASE_VERSION.
    release = version.RELEASE_VERSION.split(' ')[0]
    versionnumber = version.RELEASE_VERSION.split(' ')[1]
    VERSIONURL = 'https://raw.github.com/h3llrais3r/Auto-Subliminal/master/autosubliminal/version.py'
    USERAGENT = 'Auto-Subliminal/' + versionnumber + release.lower()[0]

    WANTEDQUEUE = []

    MINMATCHSCOREDEFAULT = 35

    # Currently not used anymore (perhaps reuse it for tvdb api calls when a custom tvdb api key is needed?)
    APIKEY = ""
    API = "http://.../%s/" % APIKEY

    # Default http timeout
    TIMEOUT = 300

    # Substrings used to detect mobile browsers from the User-Agent.
    MOBILEUSERAGENTS = ["midp", "240x320", "blackberry", "netfront", "nokia", "panasonic", "portalmmm", "sharp", "sie-", "sonyericsson", "symbian", "windows ce", "benq", "mda", "mot-", "opera mini", "philips", "pocket pc", "sagem", "samsung", "sda", "sgh-", "vodafone", "xda", "palm", "iphone", "ipod", "ipad", "android", "windows phone"]
    MOBILEAUTOSUB = True

    # API settings
    APICALLSLASTRESET = time.time()
    APICALLSRESETINT = 86400
    APICALLSMAX = 300
    APICALLS = APICALLSMAX

    # Config file settings
    if CONFIGFILE is None:
        CONFIGFILE = "config.properties"
    config.read_config(CONFIGFILE)
    if CONFIGUPGRADED:
        print "INFO: Config seems to be upgraded. Writing config."
        config.write_config()
        print "INFO: Writing config done."

    # Change to the new work directory
    if os.path.exists(autosubliminal.PATH):
        os.chdir(autosubliminal.PATH)
    else:
        print "ERROR: PATH does not exist, check config"
        os._exit(1)

    # Database
    db.initialize()

    # Logging
    logger.initialize()

    # Subliminal settings
    _initialize_subliminal()
def initialize():
    """Initialize all module-level (global) application settings.

    Fakes the entry points needed by bundled libraries, sets hard-coded
    defaults, reads the configuration file (rewriting it when an upgrade
    was detected), switches to the configured work directory and
    initializes the database and logging subsystems.  Exits the process
    when the configured PATH does not exist.
    """
    global CONFIGFILE, CONFIGVERSION, CONFIGUPGRADED, \
        GITHUBURL, VERSIONURL, USERAGENT, SYSENCODING, TIMEOUT, MESSAGEQUEUE, WANTEDQUEUE, WANTEDQUEUELOCK, \
        SCHEDULERS, SCANDISK, CHECKSUB, CHECKVERSION, \
        DBFILE, DBVERSION, \
        DAEMON, STARTED, PID, \
        MOBILE, MOBILEUSERAGENTS, \
        APIKEY, API, APICALLS, APICALLSLASTRESET, APICALLSRESETINT, APICALLSMAX, \
        PATH, VIDEOPATHS, DEFAULTLANGUAGE, DEFAULTLANGUAGESUFFIX, ADDITIONALLANGUAGES, \
        SCANDISKINTERVAL, CHECKSUBINTERVAL, CHECKVERSIONINTERVAL, CHECKVERSIONAUTOUPDATE, SCANEMBEDDEDSUBS, \
        SKIPHIDDENDIRS, MINVIDEOFILESIZE, MAXDBRESULTS, \
        LOGFILE, LOGLEVEL, LOGSIZE, LOGNUM, LOGHTTPACCESS, LOGDETAILEDFORMAT, LOGREVERSED, LOGLEVELCONSOLE, \
        WEBSERVERIP, WEBSERVERPORT, WEBROOT, USERNAME, PASSWORD, LAUNCHBROWSER, \
        SHOWMINMATCHSCORE, SHOWMINMATCHSCOREDEFAULT, SHOWMATCHSOURCE, SHOWMATCHQUALITY, SHOWMATCHCODEC, \
        SHOWMATCHRELEASEGROUP, \
        MOVIEMINMATCHSCORE, MOVIEMINMATCHSCOREDEFAULT, MOVIEMATCHSOURCE, MOVIEMATCHQUALITY, MOVIEMATCHCODEC, \
        MOVIEMATCHRELEASEGROUP, \
        SUBLIMINALPROVIDERSENTRYPOINT, SUBLIMINALPROVIDERS, SUBLIMINALPROVIDERLIST, SUBLIMINALPROVIDERCONFIGS, \
        INCLUDEHEARINGIMPAIRED, ADDIC7EDUSERNAME, ADDIC7EDPASSWORD, OPENSUBTITLESUSERNAME, OPENSUBTITLESPASSWORD, \
        USERSHOWNAMEMAPPING, USERSHOWNAMEMAPPINGUPPER, SHOWNAMEMAPPING, SHOWNAMEMAPPINGUPPER, \
        USERMOVIENAMEMAPPING, USERMOVIENAMEMAPPINGUPPER, MOVIENAMEMAPPING, MOVIENAMEMAPPINGUPPER, \
        SKIPSHOW, SKIPSHOWUPPER, \
        SKIPMOVIE, SKIPMOVIEUPPER, \
        NOTIFY, NOTIFYMAIL, MAILSRV, MAILFROMADDR, MAILTOADDR, MAILUSERNAME, MAILPASSWORD, MAILSUBJECT, MAILAUTH, \
        MAILENCRYPTION, NOTIFYTWITTER, TWITTERKEY, TWITTERSECRET, NOTIFYPUSHALOT, PUSHALOTAPI, \
        NOTIFYPUSHOVER, PUSHOVERKEY, PUSHOVERAPI, PUSHOVERDEVICES, NOTIFYNMA, NMAAPI, \
        NOTIFYGROWL, GROWLHOST, GROWLPORT, GROWLPASS, NOTIFYPROWL, PROWLAPI, PROWLPRIORITY, \
        NOTIFYPUSHBULLET, PUSHBULLETAPI, \
        POSTPROCESS, POSTPROCESSUTF8ENCODING, SHOWPOSTPROCESSCMD, SHOWPOSTPROCESSCMDARGS, MOVIEPOSTPROCESSCMD, \
        MOVIEPOSTPROCESSCMDARGS

    # Fake some entry points to get libraries working without installation
    _fake_entry_points()

    # Get fake subliminal providers entry point
    # NOTE(review): with group=None, get_entry_info() returns the whole
    # name -> EntryPoint map of the 'subliminal.providers' group rather
    # than a single EntryPoint -- confirm against the consumers of this
    # global before changing it.
    SUBLIMINALPROVIDERSENTRYPOINT = pkg_resources.get_entry_info(
        dist='fake_entry_points', group=None, name='subliminal.providers')

    # Subliminal settings
    _initialize_subliminal()

    # Version settings
    GITHUBURL = "https://github.com/h3llrais3r/Auto-Subliminal"
    VERSIONURL = "https://raw.github.com/h3llrais3r/Auto-Subliminal/master/autosubliminal/version.py"
    USERAGENT = "Auto-Subliminal/" + version.RELEASE_VERSION

    # Default http timeout
    TIMEOUT = 300

    # Message queue (list of message dicts with 'message' and 'severity' key)
    # Possible values for 'severity' are: 'info', 'warning', 'error'
    MESSAGEQUEUE = []

    # Wanted queue settings
    WANTEDQUEUE = []
    WANTEDQUEUELOCK = False

    # Scheduler settings
    SCHEDULERS = {}

    # Startup settings
    STARTED = False

    # Mobile settings
    MOBILE = True
    MOBILEUSERAGENTS = ["midp", "240x320", "blackberry", "netfront", "nokia", "panasonic", "portalmmm",
                        "sharp", "sie-", "sonyericsson", "symbian", "windows ce", "benq", "mda", "mot-",
                        "opera mini", "philips", "pocket pc", "sagem", "samsung", "sda", "sgh-", "vodafone",
                        "xda", "palm", "iphone", "ipod", "ipad", "android", "windows phone"]

    # API settings
    # Currently not used anymore (perhaps reuse it for tvdb api calls when a custom tvdb api key is needed?)
    APIKEY = ""
    API = "http://.../%s/" % APIKEY
    APICALLSLASTRESET = time.time()
    APICALLSRESETINT = 86400
    APICALLSMAX = 300
    APICALLS = APICALLSMAX

    # Score settings
    SHOWMINMATCHSCOREDEFAULT = 110
    MOVIEMINMATCHSCOREDEFAULT = 35

    # Webserver settings
    LAUNCHBROWSER = True

    # Config file settings
    CONFIGUPGRADED = False
    if CONFIGFILE is None:
        CONFIGFILE = "config.properties"
    config.read_config()
    if CONFIGUPGRADED:
        # print() call form is used so the code also parses under Python 3;
        # with a single argument it behaves identically on Python 2.
        print("INFO: Config seems to be upgraded. Writing config.")
        config.write_config()
        print("INFO: Writing config done.")

    # Change to the new work directory
    if os.path.exists(PATH):
        os.chdir(PATH)
    else:
        print("ERROR: PATH does not exist, check config")
        os._exit(1)

    # Database
    DBFILE = 'database.db'
    db.initialize()

    # Logging
    logger.initialize()
def make_format_plugin_table(group="waveform", method="read", numspaces=4,
                             unindent_first_line=True):
    """
    Return a markdown formatted table listing the plugins able to read or
    write a given format group, for insertion into docstrings.

    >>> table = make_format_plugin_table("event", "write", 4, True)
    >>> print(table)  # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
    ======...    ===========...           ========================================...
    Format       Used Module              _`Linked Function Call`
    ======...    ===========...           ========================================...
    CMTSOLUTION  :mod:`...io.cmtsolution` :func:`..._write_cmtsolution`
    CNV          :mod:`...io.cnv`         :func:`obspy.io.cnv.core._write_cnv`
    JSON         :mod:`...io.json`        :func:`obspy.io.json.core._write_json`
    KML          :mod:`obspy.io.kml`      :func:`obspy.io.kml.core._write_kml`
    NLLOC_OBS    :mod:`...io.nlloc`       :func:`obspy.io.nlloc.core.write_nlloc_obs`
    NORDIC       :mod:`obspy.io.nordic`   :func:`obspy.io.nordic.core.write_select`
    QUAKEML      :mod:`...io.quakeml`     :func:`obspy.io.quakeml.core._write_quakeml`
    SC3ML        :mod:`...io.seiscomp`    :func:`obspy.io.seiscomp.event._write_sc3ml`
    SCARDEC      :mod:`obspy.io.scardec`  :func:`obspy.io.scardec.core._write_scardec`
    SHAPEFILE    :mod:`obspy.io.shapefile` :func:`obspy.io.shapefile.core._write_shapefile`
    ZMAP         :mod:`...io.zmap`        :func:`obspy.io.zmap.core._write_zmap`
    ======...    ===========...           ========================================...

    :type group: str
    :param group: Plugin group to search (e.g. "waveform" or "event").
    :type method: str
    :param method: Either 'read' or 'write' to select plugins based on
        either read or write capability.
    :type numspaces: int
    :param numspaces: Number of spaces prepended to each line (for
        indentation in docstrings).
    :type unindent_first_line: bool
    :param unindent_first_line: Determines if first line should start with
        prepended spaces or not.
    """
    method = method.lower()
    if method not in ("read", "write"):
        raise ValueError("no valid type: %s" % method)
    method = "%sFormat" % method
    entry_points = _get_ordered_entry_points(
        "obspy.plugin.%s" % group, method, WAVEFORM_PREFERRED_ORDER)
    # One (format name, module link, function link) tuple per plugin.
    rows = []
    for format_name, ep in entry_points.items():
        module_link = ":mod:`%s`" % ".".join(ep.module_name.split(".")[:3])
        entry_info = str(get_entry_info(
            ep.dist.key, "obspy.plugin.%s.%s" % (group, format_name), method))
        func_link = ':func:`%s`' % entry_info.split(' = ')[1].replace(':', '.')
        rows.append((format_name, module_link, func_link))
    rows.sort()
    headers = ["Format", "Used Module", "_`Linked Function Call`"]
    # Column widths: widest cell per column, headers included.
    widths = [max(len(cell) for cell in column)
              for column in zip(headers, *rows)]
    rule = " ".join("=" * width for width in widths)
    lines = [rule,
             " ".join(head.ljust(width)
                      for head, width in zip(headers, widths)),
             rule]
    for row in rows:
        lines.append(" ".join(cell.ljust(width)
                              for cell, width in zip(row, widths)))
    lines.append(rule)
    indent = " " * numspaces
    table = indent + ("\n" + indent).join(lines)
    if unindent_first_line:
        table = table[numspaces:]
    return table
def main():
    """Execute a command.

    Parses the global options, resolves the requested sub-command via
    setuptools entry points (first rbtools' own, then third-party ones)
    and runs it.  Falls back to an external ``rb-<command>`` executable
    and finally to user-defined aliases; reports a parser error when the
    command cannot be resolved at all.
    """
    def exit_on_int(sig, frame):
        # Translate Ctrl-C into the conventional 128 + signal exit code.
        sys.exit(128 + sig)
    signal.signal(signal.SIGINT, exit_on_int)

    parser = argparse.ArgumentParser(
        prog=RB_MAIN,
        usage='%(prog)s [--version] <command> [options] [<args>]',
        add_help=False)

    for option in GLOBAL_OPTIONS:
        option.add_to(parser)

    opt = parser.parse_args()

    if not opt.command:
        # No sub-command given: show the general help.
        help([], parser)

    # First positional argument is the sub-command, the rest are its args.
    command_name = opt.command[0]
    args = opt.command[1:]

    if command_name == 'help':
        help(args, parser)
    elif opt.help or '--help' in args or '-h' in args:
        help(opt.command, parser)

    # Attempt to retrieve the command class from the entry points. We
    # first look in rbtools for the commands, and failing that, we look
    # for third-party commands.
    ep = pkg_resources.get_entry_info('rbtools', 'rbtools_commands',
                                      command_name)

    if not ep:
        try:
            ep = next(pkg_resources.iter_entry_points(
                'rbtools_commands', command_name))
        except StopIteration:
            # There aren't any custom entry points defined.
            pass

    if ep:
        try:
            # ep.load() returns the command class; calling it instantiates it.
            command = ep.load()()
        except ImportError:
            # TODO: It might be useful to actual have the strack
            # trace here, due to an import somewhere down the import
            # chain failing.
            sys.stderr.write('Could not load command entry point %s\n'
                             % ep.name)
            sys.exit(1)
        except Exception as e:
            sys.stderr.write('Unexpected error loading command %s: %s\n'
                             % (ep.name, e))
            sys.exit(1)

        command.run_from_argv([RB_MAIN, command_name] + args)
    else:
        # A command class could not be found, so try and execute
        # the "rb-<command>" on the system.
        try:
            # sys.exit() on success means the code below only runs when
            # the external executable could not be spawned.
            sys.exit(
                subprocess.call(['%s-%s' % (RB_MAIN, command_name)] + args,
                                stdin=sys.stdin,
                                stdout=sys.stdout,
                                stderr=sys.stderr,
                                env=os.environ.copy()))
        except OSError:
            # OSError is only raised in this scenario when subprocess.call
            # cannot find an executable with the name rbt-<command_name>. If
            # this command doesn't exist, we will check if an alias exists
            # with the name before printing an error message.
            pass

        aliases = load_config().get('ALIASES', {})

        if command_name in aliases:
            sys.exit(run_alias(aliases[command_name], args))
        else:
            parser.error('"%s" is not a command' % command_name)
def initialize():
    """Initialize all module-level (global) application settings.

    Fakes the entry points needed by bundled libraries, sets hard-coded
    defaults, reads the configuration file (rewriting it when an upgrade
    was detected), switches to the configured work directory and
    initializes the database and logging subsystems.  Exits the process
    when the configured PATH does not exist.
    """
    global CONFIGFILE, CONFIGVERSION, CONFIGUPGRADED, \
        GITHUBURL, VERSIONURL, USERAGENT, SYSENCODING, TIMEOUT, MESSAGEQUEUE, WANTEDQUEUE, WANTEDQUEUELOCK, \
        SCHEDULERS, SCANDISK, CHECKSUB, CHECKVERSION, \
        DBFILE, DBVERSION, \
        DAEMON, STARTED, PID, \
        MOBILE, MOBILEUSERAGENTS, \
        APIKEY, API, APICALLS, APICALLSLASTRESET, APICALLSRESETINT, APICALLSMAX, \
        PATH, VIDEOPATHS, DEFAULTLANGUAGE, DEFAULTLANGUAGESUFFIX, ADDITIONALLANGUAGES, \
        SCANDISKINTERVAL, CHECKSUBINTERVAL, CHECKVERSIONINTERVAL, CHECKVERSIONAUTOUPDATE, SCANEMBEDDEDSUBS, \
        SKIPHIDDENDIRS, MINVIDEOFILESIZE, MAXDBRESULTS, \
        LOGFILE, LOGLEVEL, LOGSIZE, LOGNUM, LOGHTTPACCESS, LOGDETAILEDFORMAT, LOGREVERSED, LOGLEVELCONSOLE, \
        WEBSERVERIP, WEBSERVERPORT, WEBROOT, USERNAME, PASSWORD, LAUNCHBROWSER, \
        SHOWMINMATCHSCORE, SHOWMINMATCHSCOREDEFAULT, SHOWMATCHSOURCE, SHOWMATCHQUALITY, SHOWMATCHCODEC, \
        SHOWMATCHRELEASEGROUP, \
        MOVIEMINMATCHSCORE, MOVIEMINMATCHSCOREDEFAULT, MOVIEMATCHSOURCE, MOVIEMATCHQUALITY, MOVIEMATCHCODEC, \
        MOVIEMATCHRELEASEGROUP, \
        SUBLIMINALPROVIDERSENTRYPOINT, SUBLIMINALPROVIDERS, SUBLIMINALPROVIDERLIST, SUBLIMINALPROVIDERCONFIGS, \
        INCLUDEHEARINGIMPAIRED, ADDIC7EDUSERNAME, ADDIC7EDPASSWORD, OPENSUBTITLESUSERNAME, OPENSUBTITLESPASSWORD, \
        USERSHOWNAMEMAPPING, USERSHOWNAMEMAPPINGUPPER, SHOWNAMEMAPPING, SHOWNAMEMAPPINGUPPER, \
        USERMOVIENAMEMAPPING, USERMOVIENAMEMAPPINGUPPER, MOVIENAMEMAPPING, MOVIENAMEMAPPINGUPPER, \
        SKIPSHOW, SKIPSHOWUPPER, \
        SKIPMOVIE, SKIPMOVIEUPPER, \
        NOTIFY, NOTIFYMAIL, MAILSRV, MAILFROMADDR, MAILTOADDR, MAILUSERNAME, MAILPASSWORD, MAILSUBJECT, MAILAUTH, \
        MAILENCRYPTION, NOTIFYTWITTER, TWITTERKEY, TWITTERSECRET, NOTIFYPUSHALOT, PUSHALOTAPI, \
        NOTIFYPUSHOVER, PUSHOVERKEY, PUSHOVERAPI, PUSHOVERDEVICES, NOTIFYNMA, NMAAPI, \
        NOTIFYGROWL, GROWLHOST, GROWLPORT, GROWLPASS, NOTIFYPROWL, PROWLAPI, PROWLPRIORITY, \
        NOTIFYPUSHBULLET, PUSHBULLETAPI, \
        POSTPROCESS, POSTPROCESSUTF8ENCODING, SHOWPOSTPROCESSCMD, SHOWPOSTPROCESSCMDARGS, MOVIEPOSTPROCESSCMD, \
        MOVIEPOSTPROCESSCMDARGS

    # Fake some entry points to get libraries working without installation
    _fake_entry_points()

    # Get fake subliminal providers entry point
    # NOTE(review): with group=None, get_entry_info() returns the whole
    # name -> EntryPoint map of the 'subliminal.providers' group rather
    # than a single EntryPoint -- confirm against the consumers of this
    # global before changing it.
    SUBLIMINALPROVIDERSENTRYPOINT = pkg_resources.get_entry_info(
        dist='fake_entry_points', group=None, name='subliminal.providers')

    # Subliminal settings
    _initialize_subliminal()

    # Version settings
    GITHUBURL = "https://github.com/h3llrais3r/Auto-Subliminal"
    VERSIONURL = "https://raw.github.com/h3llrais3r/Auto-Subliminal/master/autosubliminal/version.py"
    USERAGENT = "Auto-Subliminal/" + version.RELEASE_VERSION

    # Default http timeout
    TIMEOUT = 300

    # Message queue (list of message dicts with 'message' and 'severity' key)
    # Possible values for 'severity' are: 'info', 'warning', 'error'
    MESSAGEQUEUE = []

    # Wanted queue settings
    WANTEDQUEUE = []
    WANTEDQUEUELOCK = False

    # Scheduler settings
    SCHEDULERS = {}

    # Startup settings
    STARTED = False

    # Mobile settings
    MOBILE = True
    MOBILEUSERAGENTS = [
        "midp", "240x320", "blackberry", "netfront", "nokia", "panasonic", "portalmmm", "sharp", "sie-",
        "sonyericsson", "symbian", "windows ce", "benq", "mda", "mot-", "opera mini", "philips", "pocket pc",
        "sagem", "samsung", "sda", "sgh-", "vodafone", "xda", "palm", "iphone", "ipod", "ipad", "android",
        "windows phone"
    ]

    # API settings
    # Currently not used anymore (perhaps reuse it for tvdb api calls when a custom tvdb api key is needed?)
    APIKEY = ""
    API = "http://.../%s/" % APIKEY
    APICALLSLASTRESET = time.time()
    APICALLSRESETINT = 86400
    APICALLSMAX = 300
    APICALLS = APICALLSMAX

    # Score settings
    SHOWMINMATCHSCOREDEFAULT = 110
    MOVIEMINMATCHSCOREDEFAULT = 35

    # Webserver settings
    LAUNCHBROWSER = True

    # Config file settings
    CONFIGUPGRADED = False
    if CONFIGFILE is None:
        CONFIGFILE = "config.properties"
    config.read_config()
    if CONFIGUPGRADED:
        # print() call form is used so the code also parses under Python 3;
        # with a single argument it behaves identically on Python 2.
        print("INFO: Config seems to be upgraded. Writing config.")
        config.write_config()
        print("INFO: Writing config done.")

    # Change to the new work directory
    if os.path.exists(PATH):
        os.chdir(PATH)
    else:
        print("ERROR: PATH does not exist, check config")
        os._exit(1)

    # Database
    DBFILE = 'database.db'
    db.initialize()

    # Logging
    logger.initialize()
def main():
    """Execute a command.

    Parses the global options, resolves the requested sub-command via
    setuptools entry points (first rbtools' own, then third-party ones)
    and runs it.  Falls back to an external ``rb-<command>`` executable
    and finally to user-defined aliases; reports a parser error when the
    command cannot be resolved at all.
    """
    def exit_on_int(sig, frame):
        # Translate Ctrl-C into the conventional 128 + signal exit code.
        sys.exit(128 + sig)
    signal.signal(signal.SIGINT, exit_on_int)

    parser = argparse.ArgumentParser(
        prog=RB_MAIN,
        usage='%(prog)s [--version] <command> [options] [<args>]',
        add_help=False)

    for option in GLOBAL_OPTIONS:
        option.add_to(parser)

    opt = parser.parse_args()

    if not opt.command:
        # No sub-command given: show the general help.
        help([], parser)

    # First positional argument is the sub-command, the rest are its args.
    command_name = opt.command[0]
    args = opt.command[1:]

    if command_name == 'help':
        help(args, parser)
    elif opt.help or '--help' in args or '-h' in args:
        help(opt.command, parser)

    # Attempt to retrieve the command class from the entry points. We
    # first look in rbtools for the commands, and failing that, we look
    # for third-party commands.
    ep = pkg_resources.get_entry_info('rbtools', 'rbtools_commands',
                                      command_name)

    if not ep:
        try:
            ep = next(
                pkg_resources.iter_entry_points('rbtools_commands',
                                                command_name))
        except StopIteration:
            # There aren't any custom entry points defined.
            pass

    if ep:
        try:
            # ep.load() returns the command class; calling it instantiates it.
            command = ep.load()()
        except ImportError:
            # TODO: It might be useful to actual have the strack
            # trace here, due to an import somewhere down the import
            # chain failing.
            sys.stderr.write('Could not load command entry point %s\n'
                             % ep.name)
            sys.exit(1)
        except Exception as e:
            sys.stderr.write('Unexpected error loading command %s: %s\n'
                             % (ep.name, e))
            sys.exit(1)

        command.run_from_argv([RB_MAIN, command_name] + args)
    else:
        # A command class could not be found, so try and execute
        # the "rb-<command>" on the system.
        try:
            # sys.exit() on success means the code below only runs when
            # the external executable could not be spawned.
            sys.exit(
                subprocess.call(['%s-%s' % (RB_MAIN, command_name)] + args,
                                stdin=sys.stdin,
                                stdout=sys.stdout,
                                stderr=sys.stderr,
                                env=os.environ.copy()))
        except OSError:
            # OSError is only raised in this scenario when subprocess.call
            # cannot find an executable with the name rbt-<command_name>. If
            # this command doesn't exist, we will check if an alias exists
            # with the name before printing an error message.
            pass

        aliases = load_config().get('ALIASES', {})

        if command_name in aliases:
            sys.exit(run_alias(aliases[command_name], args))
        else:
            parser.error('"%s" is not a command' % command_name)
def get_plugin_paths(self, kind, plugin_type):
    """Locate the source file and default config of a pip-packaged plugin.

    :param kind: Name of the plugin to look up within the package.
    :param plugin_type: A PluginType value (SOURCE or ELEMENT) selecting
        which entry point group to search.
    :returns: A tuple of (directory containing the plugin's .py file,
        path to the accompanying .yaml defaults file or None if the
        plugin has none, human readable description of the package origin).
    :raises PluginError: If the pip package or the named plugin within it
        cannot be found or loaded.
    """
    # Deferred import: pkg_resources is expensive to import.
    import pkg_resources

    # Sources and elements are looked up in separate
    # entrypoint groups from the same package.
    if plugin_type == PluginType.SOURCE:
        entrypoint_group = "buildstream.plugins.sources"
    elif plugin_type == PluginType.ELEMENT:
        entrypoint_group = "buildstream.plugins.elements"
    else:
        # Previously an unhandled plugin_type left entrypoint_group unbound
        # and crashed below with a confusing NameError; fail fast instead.
        raise ValueError("Unsupported plugin type: {}".format(plugin_type))

    try:
        package = pkg_resources.get_entry_info(self._package_name, entrypoint_group, kind)
    except pkg_resources.DistributionNotFound as e:
        raise PluginError(
            "{}: Failed to load {} plugin '{}': {}".format(
                self.provenance_node.get_provenance(), plugin_type, kind, e
            ),
            reason="package-not-found",
        ) from e
    except pkg_resources.VersionConflict as e:
        raise PluginError(
            "{}: Version conflict encountered while loading {} plugin '{}'".format(
                self.provenance_node.get_provenance(), plugin_type, kind
            ),
            detail=e.report(),
            reason="package-version-conflict",
        ) from e
    except pkg_resources.RequirementParseError as e:
        raise PluginError(
            "{}: Malformed package-name '{}' encountered: {}".format(
                self.provenance_node.get_provenance(), self._package_name, e
            ),
            reason="package-malformed-requirement",
        ) from e

    if package is None:
        raise PluginError(
            "{}: Pip package {} does not contain a plugin named '{}'".format(
                self.provenance_node.get_provenance(), self._package_name, kind
            ),
            reason="plugin-not-found",
        )

    # NOTE(review): pkg_resources._manager is private setuptools API,
    # kept as-is from the original implementation.
    location = package.dist.get_resource_filename(
        pkg_resources._manager, package.module_name.replace(".", os.sep) + ".py"
    )

    # Also load the defaults - required since setuptools
    # may need to extract the file.
    try:
        defaults = package.dist.get_resource_filename(
            pkg_resources._manager, package.module_name.replace(".", os.sep) + ".yaml"
        )
    except KeyError:
        # The plugin didn't have an accompanying YAML file
        defaults = None

    return (
        os.path.dirname(location),
        defaults,
        "python package '{}' at: {}".format(package.dist, package.dist.location),
    )