Example #1
0
def status(cmd_proc, show_password):
    """Show current status"""
    cmd_proc.save_current_config()
    result = cmd_proc.re_login()
    if not result:
        utils.print_error('Not logged in', cmd_proc)
    headers = ['Key', 'Value']
    # Static rows first; conditional rows are appended below.
    table = [
        ['vca_cli_version',
         pkg_resources.require("vca-cli")[0].version],
        ['pyvcloud_version',
         pkg_resources.require("pyvcloud")[0].version],
        ['profile_file', cmd_proc.profile_file],
        ['profile', cmd_proc.profile],
        ['host', cmd_proc.vca.host],
        ['host_score', cmd_proc.host_score],
        ['user', cmd_proc.vca.username],
        ['instance', cmd_proc.instance],
        ['org', cmd_proc.vca.org],
        ['vdc', cmd_proc.vdc_name],
        ['gateway', cmd_proc.gateway],
    ]
    # Only mask a real, non-empty password unless the caller asked to see it.
    hide_password = (cmd_proc.password is not None and
                     len(cmd_proc.password) > 0 and
                     not show_password)
    if hide_password:
        table.append(['password', '<encrypted>'])
    else:
        table.append(['password', str(cmd_proc.password)])
    if cmd_proc.vca is not None:
        table.append(['type', cmd_proc.vca.service_type])
        table.append(['version', cmd_proc.vca.version])
        session = cmd_proc.vca.vcloud_session
        if session is not None:
            table.append(['org_url', session.url])
    table.append(['active session', str(result)])
    utils.print_table('Status:', headers, table, cmd_proc)
Example #2
0
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
        to_dir=os.curdir, download_delay=15):
    """Make setuptools >= *version* importable, downloading it if needed."""
    to_dir = os.path.abspath(to_dir)
    bootstrap_mods = 'pkg_resources', 'setuptools'
    imported = set(sys.modules).intersection(bootstrap_mods)
    try:
        import pkg_resources
    except ImportError:
        # Nothing installed at all: bootstrap from scratch.
        return _do_download(version, download_base, to_dir, download_delay)
    try:
        pkg_resources.require("setuptools>=" + version)
    except pkg_resources.DistributionNotFound:
        return _do_download(version, download_base, to_dir, download_delay)
    except pkg_resources.VersionConflict as conflict:
        if imported:
            # A conflicting copy is already loaded; we cannot swap it out
            # from under the running interpreter.
            msg = textwrap.dedent("""
                The required version of setuptools (>={version}) is not available,
                and can't be installed while this script is running. Please
                install a more recent version first, using
                'easy_install -U setuptools'.

                (Currently using {VC_err.args[0]!r})
                """).format(VC_err=conflict, version=version)
            sys.stderr.write(msg)
            sys.exit(2)
        # Stale copy not yet imported anywhere: drop it and fetch a fresh one.
        del pkg_resources, sys.modules['pkg_resources']
        return _do_download(version, download_base, to_dir, download_delay)
    else:
        # Requirement already satisfied.
        return
Example #3
0
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                   to_dir=os.curdir, download_delay=15):
    """Guarantee setuptools >= *version* is available, downloading otherwise."""
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    previously_loaded = ('pkg_resources' in sys.modules or
                         'setuptools' in sys.modules)
    try:
        import pkg_resources
    except ImportError:
        return _do_download(version, download_base, to_dir, download_delay)
    try:
        pkg_resources.require("setuptools>=" + version)
        return
    except pkg_resources.DistributionNotFound:
        return _do_download(version, download_base, to_dir,
                            download_delay)
    except pkg_resources.VersionConflict:
        e = sys.exc_info()[1]
        if not previously_loaded:
            # Safe to discard the stale module and bootstrap a fresh copy.
            del pkg_resources, sys.modules['pkg_resources']    # reload ok
            return _do_download(version, download_base, to_dir,
                                download_delay)
        # A conflicting copy is already imported; abort with instructions.
        sys.stderr.write(
        "The required version of setuptools (>=%s) is not available,\n"
        "and can't be installed while this script is running. Please\n"
        "install a more recent version first, using\n"
        "'easy_install -U setuptools'."
        "\n\n(Currently using %r)\n" % (version, e.args[0]))
        sys.exit(2)
Example #4
0
def assert_packages():
    """Verify all declared dependencies resolve; exit with an error if not.

    Prints the version conflict and terminates the process when an
    installed package does not satisfy a declared requirement.
    """
    try:
        pkg_resources.require(get_dependencies())
    except VersionConflict as e:
        print("Missing a library requirement, please update:")
        print(str(e))
        # Exit nonzero so scripts and CI can detect the failed check
        # (the original exited with status 0, masking the error).
        sys.exit(1)
Example #5
0
 def find_egg_entry_point(self, object_type, name=None):
     """
     Return the ``(loaded_entry_point, protocol, name)`` tuple for the
     entry point with the given ``name`` in the egg ``self.spec``.

     ``object_type.egg_protocols`` is searched group by group; within a
     group the first protocol that yields an entry point wins.  Raises
     ``LookupError`` when no entry point, or more than one, is found.
     """
     if name is None:
         name = 'main'
     possible = []
     for protocol_options in object_type.egg_protocols:
         for protocol in protocol_options:
             # Activate the egg's distribution before looking it up.
             pkg_resources.require(self.spec)
             entry = pkg_resources.get_entry_info(
                 self.spec,
                 protocol,
                 name)
             if entry is not None:
                 possible.append((entry.load(), protocol, entry.name))
                 break
     if not possible:
         # Better exception
         dist = pkg_resources.get_distribution(self.spec)
         # NOTE(review): `protocol_options` below refers to the *last*
         # value of the loop variable above, not all protocol groups —
         # confirm this is intentional.
         raise LookupError(
             "Entry point %r not found in egg %r (dir: %s; protocols: %s; "
             "entry_points: %s)"
             % (name, self.spec,
                dist.location,
                ', '.join(_flatten(object_type.egg_protocols)),
                ', '.join(_flatten([
             dictkeys(pkg_resources.get_entry_info(self.spec, prot, name) or {})
             for prot in protocol_options] or '(no entry points)'))))
     if len(possible) > 1:
         raise LookupError(
             "Ambiguous entry points for %r in egg %r (protocols: %s)"
             % (name, self.spec, ', '.join(_flatten(protocol_options))))
     return possible[0]
Example #6
0
    def with_project_on_sys_path(self, func):
        """Build the project, put its freshly built egg on sys.path, call
        *func*, then restore sys.path/sys.modules and the working set."""
        # Ensure metadata is up-to-date
        self.reinitialize_command('build_py', inplace=0)
        self.run_command('build_py')
        bpy_cmd = self.get_finalized_command("build_py")
        build_path = normalize_path(bpy_cmd.build_lib)

        # Build extensions
        self.reinitialize_command('egg_info', egg_base=build_path)
        self.run_command('egg_info')

        self.reinitialize_command('build_ext', inplace=0)
        self.run_command('build_ext')

        ei_cmd = self.get_finalized_command("egg_info")

        # Snapshot interpreter state so it can be restored afterwards.
        old_path = sys.path[:]
        old_modules = sys.modules.copy()

        try:
            sys.path.insert(0, normalize_path(ei_cmd.egg_base))
            # Rebuild the global working set so it sees the new path entry.
            working_set.__init__()
            add_activation_listener(lambda dist: dist.activate())
            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
            func()
        finally:
            # Restore everything even if func() raised.
            sys.path[:] = old_path
            sys.modules.clear()
            sys.modules.update(old_modules)
            working_set.__init__()
Example #7
0
def use_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    download_delay=15
):
    """Automatically find/download setuptools and make it available on sys.path

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end with
    a '/').  `to_dir` is the directory where setuptools will be downloaded, if
    it is not already available.  If `download_delay` is specified, it should
    be the number of seconds that will be paused before initiating a download,
    should one be required.  If an older version of setuptools is installed,
    this routine will print a message to ``sys.stderr`` and raise SystemExit in
    an attempt to abort the calling script.
    """
    # If setuptools/pkg_resources are already imported, a conflicting
    # version cannot be safely swapped out from under them.
    was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
    def do_download():
        # Fetch the egg, put it first on sys.path, and tell setuptools to
        # install itself from that egg later.
        egg = download_setuptools(version, download_base, to_dir, download_delay)
        sys.path.insert(0, egg)
        import setuptools; setuptools.bootstrap_install_from = egg
    try:
        import pkg_resources
    except ImportError:
        # No setuptools at all: bootstrap it.
        return do_download()
    try:
        pkg_resources.require("setuptools>="+version); return
    except pkg_resources.VersionConflict, e:
        if was_imported:
            print >>sys.stderr, (
            "The required version of setuptools (>=%s) is not available, and\n"
            "can't be installed while this script is running. Please install\n"
            " a more recent version first, using 'easy_install -U setuptools'."
            "\n\n(Currently using %r)"
            ) % (version, e.args[0])
            sys.exit(2)
        # NOTE(review): when setuptools was NOT yet imported, the
        # VersionConflict is swallowed here and nothing is downloaded —
        # other ez_setup variants del pkg_resources and call do_download()
        # at this point; confirm the omission is intentional.
Example #8
0
def is_installed(requirement):
    """Return True when *requirement* is satisfied by installed packages."""
    try:
        pkg_resources.require(requirement)
        return True
    except pkg_resources.ResolutionError:
        # Covers both missing distributions and version conflicts.
        return False
Example #9
0
def require_one(one):
    """Check a single requirement string.

    Returns None when *one* resolves successfully, otherwise a short
    human-readable error message describing the failure.
    """
    try:
        pkg_resources.require(one)
    except pkg_resources.DistributionNotFound as e:
        return 'package not found: %s' % e
    except pkg_resources.VersionConflict as e:
        # Exception objects are not indexable on Python 3 (`e[0]` raises
        # TypeError); use the documented .dist (installed distribution)
        # and .req (wanted requirement) attributes instead.
        return 'package version conflict: found: %s, expected: %s' % (
            str(e.dist), str(e.req))
Example #10
0
File: config.py Project: binyam/bob
def my_eggs():
  """Returns currently installed egg resources"""

  installed = pkg_resources.require('bob')

  # Keys of the direct dependencies of the top-level 'bob' distribution.
  keys = [k.key for k in installed[0].requires()]
  # Reuse the already-resolved working set instead of resolving 'bob' a
  # second time (the original called pkg_resources.require twice).
  return [k for k in installed if k.key in keys]
Example #11
0
    def add_project_to_sys_path(self):
        """Build this project in place and make it the copy that gets
        imported: prepend its egg-info base to sys.path and reset the
        pkg_resources working set accordingly."""
        from pkg_resources import normalize_path, add_activation_listener
        from pkg_resources import working_set, require

        # Regenerate metadata and compiled extensions before testing.
        self.reinitialize_command('egg_info')
        self.run_command('egg_info')
        self.reinitialize_command('build_ext', inplace=1)
        self.run_command('build_ext')


        # Check if this distribution is already on sys.path
        # and remove that version, this ensures that the right
        # copy of the package gets tested.

        # Saved so a companion teardown can restore interpreter state.
        self.__old_path = sys.path[:]
        self.__old_modules = sys.modules.copy()


        ei_cmd = self.get_finalized_command('egg_info')
        sys.path.insert(0, normalize_path(ei_cmd.egg_base))
        sys.path.insert(1, os.path.dirname(__file__))

        # Strip the namespace packages defined in this distribution
        # from sys.modules, needed to reset the search path for
        # those modules.

        # NOTE(review): getattr without a default raises AttributeError if
        # the distribution lacks `namespace_packages`; likewise `del` will
        # KeyError for a namespace package that was never imported — confirm.
        nspkgs = getattr(self.distribution, 'namespace_packages')
        if nspkgs is not None:
            for nm in nspkgs:
                del sys.modules[nm]

        # Reset pkg_resources state:
        add_activation_listener(lambda dist: dist.activate())
        working_set.__init__()
        require('%s==%s'%(ei_cmd.egg_name, ei_cmd.egg_version))
Example #12
0
def skip_if_missing_requirements(*requirements):
    """Test decorator: skip unless every given requirement resolves."""
    msg = ''
    try:
        pkg_resources.require(*requirements)
    except pkg_resources.DistributionNotFound:
        msg = 'Missing one or more requirements (%s)' % '|'.join(requirements)
    return skipUnless(msg == '', msg)
Example #13
0
def check_requirements(path):
    """ checks requirements.txt modules are installed

    Logs a warning for every unsatisfied requirement found in
    <path>/requirements.txt; logs success when all are installed.
    """
    modules, missing  = import_package(path)
    name = os.path.basename(path)
    try:
        f = open(os.path.join(path, "requirements.txt"))
    except Exception:
        log.info("%s no requirements.txt found"%name)
        return

    errors = 0
    # Close the file deterministically (the original leaked the handle).
    with f:
        for req in f.readlines():
            try:
                require(req)
            except VersionConflict as e:
                log.warning("%s:\nFound=%s\nRequired=%s"%(name, e.dist,
                                                       e.req.specs))
                errors += 1
            except Exception as e:
                # e.g. pkg_resources.require(pkg_resources) throws DistributionNotFound
                # Fall back to a plain import of the bare module name.
                try:
                    import_module(req.split("==")[0].strip("\n"))
                except Exception:
                    log.warning("%s: %s"%(name, e))
                    errors += 1
    if not errors:
        log.info("%s requirements are all installed"%name)
Example #14
0
	def setup_widgets(self, app):
		"""Build the About dialog from its glade file and fill in the
		application (and, when available, daemon) version string."""
		self.builder = UIBuilder()
		# Fix icon path
		self.builder.replace_icon_path("icons/", self.iconpath)
		# Load glade file
		self.builder.add_from_file(os.path.join(self.gladepath, "about.glade"))
		self.builder.connect_signals(self)
		self.dialog = self.builder.get_object("dialog")
		# Get app version
		app_ver = "unknown"
		try:
			if IS_WINDOWS:
				# pkg_resources will not work on cx_Frozen package
				from syncthing_gtk.tools import get_install_path
				with open(os.path.join(get_install_path(), "__version__"), "r") as vfile:
					app_ver = vfile.read().strip(" \t\r\n")
			else:
				import pkg_resources, syncthing_gtk
				# Only trust pkg_resources when the imported module really
				# lives inside the installed distribution's location.
				if syncthing_gtk.__file__.startswith(pkg_resources.require("syncthing-gtk")[0].location):
					app_ver = pkg_resources.require("syncthing-gtk")[0].version
		except:
			# pkg_resources is not available or __version__ file missing
			# There is no reason to crash on this.
			pass
		# Get daemon version
		try:
			daemon_ver = app.daemon.get_version()
			app_ver = "%s (Daemon %s)" % (app_ver, daemon_ver)
		except:
			# App is None or daemon version is not yet known
			pass
		# Display versions in UI
		self.builder.get_object("lblVersion").set_label(app_ver)
Example #15
0
def cli():
    """Command-line entry point: parse arguments, dispatch to the chosen
    subcommand, or print the package version with --version."""
    parser = argparse.ArgumentParser(
        prog=PROJECT_NAME,
        description='Exercise keystone using python-keystoneclient')
    parser.add_argument(
        '--os-token', default='ADMIN')
    parser.add_argument(
        '--os-endpoint', default='http://localhost:35357/v3')
    parser.add_argument(
        '--default-domain-id', default='default')
    parser.add_argument(
        '--debug', action='store_true', default=False)
    parser.add_argument(
        '--version', action='store_true',
        help='Show version number and exit')

    subparsers = parser.add_subparsers(title='subcommands')

    # Register every SubCommand subclass found in the subcommands module.
    for attr in dir(subcommands):
        ref = getattr(subcommands, attr)
        # find classes extending of SubCommand
        if (type(ref) is type
                and ref != subcommands.SubCommand
                and issubclass(ref, subcommands.SubCommand)):
            subparser = subparsers.add_parser(ref.command)
            ref.configure_parser(subparser)
            subparser.set_defaults(func=ref())

    args = parser.parse_args()

    if args.version:
        # Prints e.g. "name 1.2.3" from the installed distribution metadata.
        print pkg_resources.require(PROJECT_NAME)[0]
        sys.exit()

    args.func(args)
Example #16
0
    def test_models_sync(self):
        """Verify the SQLAlchemy models match the schema that the
        migration scripts produce (alembic autogenerate diff is empty)."""
        # recent versions of sqlalchemy and alembic are needed for running of
        # this test, but we already have them in requirements
        try:
            pkg.require('sqlalchemy>=0.8.4', 'alembic>=0.6.2')
        except (pkg.VersionConflict, pkg.DistributionNotFound) as e:
            # Message kept in sync with the actual requirement above (it
            # previously claimed alembic>=0.6.3 while requiring >=0.6.2).
            self.skipTest('sqlalchemy>=0.8.4 and alembic>=0.6.2 are required'
                          ' for running of this test: %s' % e)

        # drop all tables after a test run
        self.addCleanup(self._cleanup)

        # run migration scripts
        self.db_sync(self.get_engine())

        with self.get_engine().connect() as conn:
            opts = {
                'include_object': self.include_object,
                'compare_type': self.compare_type,
                'compare_server_default': self.compare_server_default,
            }
            mc = alembic.migration.MigrationContext.configure(conn, opts=opts)

            # compare schemas and fail with diff, if it's not empty
            diff = alembic.autogenerate.compare_metadata(mc,
                                                         self.get_metadata())
            if diff:
                msg = pprint.pformat(diff, indent=2, width=20)
                self.fail(
                    "Models and migration scripts aren't in sync:\n%s" % msg)
Example #17
0
    def webui_command(self, options):
        """
        Handles the 'webui' CLI command.

        Refuses to start when a daemon is already running, verifies the
        optional webui dependencies, then starts the task queue, IPC
        server, and web UI, blocking until the task queue finishes.

        :param options: argparse options
        """
        if self.is_daemon:
            log.error('Webui or daemon is already running.')
            return
        # TODO: make webui an enablable plugin in regular daemon mode
        # Verify the optional [webui] extra is installed before importing it.
        try:
            pkg_resources.require('flexget[webui]')
        except pkg_resources.DistributionNotFound as e:
            log.error('Dependency not met. %s' % e)
            log.error('Webui dependencies not installed. You can use `pip install flexget[webui]` to install them.')
            self.shutdown()
            return
        if options.daemonize:
            self.daemonize()
        self.is_daemon = True
        # Imported late so the dependency check above runs first.
        from flexget.ui import webui
        self.task_queue.start()
        self.ipc_server.start()
        webui.start(self)
        self.task_queue.wait()
Example #18
0
def check_requirements(requirements_path):
    """Raise ImportError when any requirement in the file is unsatisfied."""
    failed_deps = []
    with open(requirements_path, 'r') as f:
        for raw in f:
            req = raw.strip()
            if not req or req.startswith('#'):
                # blank lines and comments carry no requirement
                continue
            if req.startswith('-e ') or '://' in req:
                # VCS URIs are ignored; parsing `#egg=<version_req>` out of
                # them would be possible but is out of scope here.
                continue
            try:
                require(req)
            except (DistributionNotFound, VersionConflict):
                failed_deps.append(req)

    if failed_deps:
        raise ImportError(
            '\nPython module dependency verification failed! \n\n'
            'The following dependencies are either missing or out of '
            'date: \n\t{}\n\nYou probably need to run --> sudo pip '
            'install -r requirements.txt\n'
            .format('\n\t'.join(failed_deps)))
Example #19
0
 def __call__(self, environ, start_response):
     """WSGI middleware: push this app's config for the current thread,
     call the wrapped application, and pop the config once the response
     is done (immediately for errors and concrete iterables, deferred to
     close() for generators)."""
     global wsgilib
     if wsgilib is None:
         # Lazy import, guarded so it only happens once per process.
         import pkg_resources
         pkg_resources.require('Paste')
         from paste import wsgilib
     popped_config = None
     if 'paste.config' in environ:
         popped_config = environ['paste.config']
     conf = environ['paste.config'] = self.config.copy()
     app_iter = None
     CONFIG.push_thread_config(conf)
     try:
         app_iter = self.application(environ, start_response)
     finally:
         if app_iter is None:
             # An error occurred...
             CONFIG.pop_thread_config(conf)
             if popped_config is not None:
                 environ['paste.config'] = popped_config
     if type(app_iter) in (list, tuple):
         # Because it is a concrete iterator (not a generator) we
         # know the configuration for this thread is no longer
         # needed:
         CONFIG.pop_thread_config(conf)
         if popped_config is not None:
             environ['paste.config'] = popped_config
         return app_iter
     else:
         # Generator: defer the pop until the server closes the iterable.
         def close_config():
             CONFIG.pop_thread_config(conf)
         new_app_iter = wsgilib.add_close(app_iter, close_config)
         return new_app_iter
Example #20
0
    def get_version_info(self, pack=None):
        """Return (as a gateway JSON response) version info for configured
        packages plus any version-like attributes of the /System directory.

        pack=None -> all configured packages; pack="all" -> each package
        with its full dependency closure; any other value -> only that
        package.
        """
        import pkg_resources
        pkg_list = ["scioncc"]

        # Optional extra packages from config, comma separated.
        packs = self.config.get_safe(CFG_PREFIX + ".version_packages")
        if packs:
            pkg_list.extend(packs.split(","))

        version = {}
        for package in pkg_list:
            try:
                if pack == "all":
                    # Include the versions of all resolved dependencies too.
                    pack_deps = pkg_resources.require(package)
                    version.update({p.project_name: p.version for p in pack_deps})
                else:
                    version[package] = pkg_resources.require(package)[0].version
                # @TODO git versions for current?
            except pkg_resources.DistributionNotFound:
                # Not installed: simply omit it from the result.
                pass

        # Merge version-like attributes published in the system directory.
        try:
            dir_client = DirectoryServiceProcessClient(process=self.process)
            sys_attrs = dir_client.lookup("/System")
            if sys_attrs and isinstance(sys_attrs, dict):
                version.update({k: v for (k, v) in sys_attrs.iteritems() if "version" in k.lower()})
        except Exception as ex:
            log.exception("Could not determine system directory attributes")

        if pack and pack != "all":
            # Filter down to the single requested package.
            version = {k: v for (k, v) in version.iteritems() if k == pack}

        return self.gateway_json_response(version)
Example #21
0
    def postinstall(self, dist):
        """ call postinstall scripts

        Activates *dist*, reads its postinstall_scripts.txt metadata, and
        imports each listed module, calling its install() hook. Failures
        of individual scripts are reported but do not abort the run.
        """
        print("Post installation")

        if (dist):
            # Activate the distribution and make its location importable.
            pkg_resources.require(dist.project_name)
            sys.path.append(dist.location)

        # Best-effort read of the script list; missing metadata simply
        # means there is nothing to run.
        try:
            lstr = dist.get_metadata("postinstall_scripts.txt")
        except:
            lstr = []

        # Add pywin32 path
        if ('win32' in sys.platform):
            try:
                win32dir = pj(get_base_dir('pywin32'), 'pywin32_system32')

                if (win32dir not in os.environ['PATH']):
                    os.environ['PATH'] += ";" + win32dir
            except:
                print("!!Error : pywin32 package not found. Please install it before.")

        # process postinstall
        for s in pkg_resources.yield_lines(lstr):
            print("Executing %s" % (s))

            # Each entry names a module expected to expose install().
            try:
                module = __import__(s, globals(), locals(), s.split('.'))
                module.install()

            except Exception as e:
                print("Warning : Cannot execute %s" % (s,))
                print(e)
Example #22
0
def check_requirements(requires):
    """
    :param requires:
        List of requirements. If a list item is itself a list, this function checks if
        any item of this list is already installed (in order) and uses this as requirement.
        If non is installed the first item of the list is used.
    :return: List of requirements
    """
    import pkg_resources

    requirements = []
    for entry in requires:
        if not isinstance(entry, list):
            requirements.append(entry)
            continue
        # Alternative group: keep every alternative that already resolves;
        # fall back to the first alternative when none does.
        satisfied = False
        for candidate in entry:
            try:
                pkg_resources.require(candidate)
            except (pkg_resources.DistributionNotFound,
                    pkg_resources.VersionConflict):
                continue
            requirements.append(candidate)
            satisfied = True
        if not satisfied:
            requirements.append(entry[0])
    return requirements
def is_installed(package_name):
    """Return True when *package_name* resolves, False when it is missing."""
    try:
        require(package_name)
        return True
    except DistributionNotFound:
        return False
Example #24
0
def check_dependencies(file):
    ''' Verify all necessary dependencies are installed '''
    for dep in file:
        dep = dep.replace('\n', '')
        # Entries that are blank or contain '#' are treated as comments.
        if not dep or '#' in dep:
            continue
        try:
            require(dep)
            success(dep)
        except DistributionNotFound as df:
            fail(dep)
            # Extract the bare package name from the exception text.
            missing = str(df).split(' ')[1][1:-1]
            print('\n' + missing + ' dependency missing.')
            print('Please install it using "pip/conda install ' + missing + '"')
            fail("\nDependencies")
            end()
        except VersionConflict as vc:
            fail(dep)
            print("\nRequired version and installed version differ for the "
                  "following package:\n"
                  "Required version: " + dep)
            found_name = str(vc).split(' ')[0][1:]  # strip the leading '('
            found_version = str(vc).split(' ')[1]
            print("Installed version: " + found_name + "==" + found_version)
            fail("\nDependencies")
            end()
Example #25
0
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                   to_dir=os.curdir, download_delay=15):
    """Ensure the distribute fork (>= *version*) is importable, downloading
    it when absent; abort with a message if a conflicting copy is loaded."""
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    was_imported = 'pkg_resources' in sys.modules or \
        'setuptools' in sys.modules
    try:
        import pkg_resources
        # Only distribute's pkg_resources defines `_distribute`; a plain
        # setuptools copy must be replaced, so treat it as not installed.
        if not hasattr(pkg_resources, '_distribute'):
            raise ImportError
    except ImportError:
        return _do_download(version, download_base, to_dir, download_delay)
    try:
        pkg_resources.require("distribute>="+version)
        return
    except pkg_resources.VersionConflict, e:
        if was_imported:
            # Already imported: cannot swap the module out; tell the user.
            print >>sys.stderr, (
            "The required version of distribute (>=%s) is not available, and\n"
            "can't be installed while this script is running. Please install\n"
            " a more recent version first, using 'easy_install -U distribute'."
            "\n\n(Currently using %r)") % (version, e.args[0])
            sys.exit(2)
        else:
            del pkg_resources, sys.modules['pkg_resources']    # reload ok
            return _do_download(version, download_base, to_dir, download_delay)
Example #26
0
def version_table():
  """Returns a summarized version table of all software compiled in, with their
  respective versions."""

  # Layout constants for the fixed-width table printed below.
  space = ' '
  packsize = 20
  descsize = 55

  # Merge the version dictionaries of every compiled-in subsystem.
  version_dict = {}
  version_dict.update(bob.core.version)
  version_dict.update(bob.io.version)
  version_dict.update(bob.sp.version)
  version_dict.update(bob.ip.version)
  if hasattr(bob.machine, 'version'):
    version_dict.update(bob.machine.version)
  if bob.has_daq and hasattr(bob.daq, 'version'):
    version_dict.update(bob.daq.version)
  if bob.has_visioner and hasattr(bob.visioner, 'version'):
    version_dict.update(bob.visioner.version)

  build = pkg_resources.require('bob')[0]

  bob_version = "'%s' (%s)" % (build.version, platform.platform())
  print 75*'='
  print (" bob %s" % bob_version).center(75)
  print 75*'='
  print ""

  # NOTE(review): `distribution` is never used below (same value as
  # `build`) — presumably leftover; confirm before removing.
  distribution = pkg_resources.require('bob')[0]

  print "Python Egg Properties"
  print "---------------------\n"
  print " * Version         : '%s'" % build.version
  print " * System          : '%s'" % platform.system()
  print " * Platform        : '%s'" % platform.platform()
  print " * Python Version  : '%s'" % platform.python_version()
  print " * Egg Dependencies: "
  for egg in my_eggs():
    print "   - %s, version '%s'" % (egg.key, egg.version)
  print ""

  print "Compiled-in Dependencies"
  print "------------------------\n"

  # Header/separator lines for the two-column table.
  sep = space + packsize*'=' + space + descsize*'='
  fmt = 2*space + ('%%%ds' % packsize) + space + ('%%%ds' % descsize)
  print sep
  print fmt % ('Package'.ljust(packsize), 'Version'.ljust(descsize))
  print sep
  for k in sorted(version_dict.keys()):
    v = version_dict[k]
    # Some entries need special pretty-printing of their version payloads.
    if k.lower() == 'numpy': v = '%s (%s)' % (numpy.version.version, v)
    if k.lower() == 'compiler': v = '-'.join(v)
    elif k.lower() == 'ffmpeg':
      if v.has_key('ffmpeg'): v = v['ffmpeg']
      else: v = ';'.join(['%s-%s' % (x, v[x]) for x in v.keys()])
    elif k.lower() == 'qt4': v = '%s (from %s)' % v
    elif k.lower() == 'fftw': v = '%s (%s)' % v[:2]
    print fmt % (k.ljust(packsize), v.ljust(descsize))
  print sep
Example #27
0
def _setup(**kwargs):
    '''we'll make use of Distribution's __init__ downloading setup_requires packages right away here'''
    from setuptools.dist import Distribution
    # Instantiating Distribution triggers the setup_requires download as a
    # side effect; the object itself is not needed afterwards.
    dist = Distribution(kwargs)

    # now that we supposedly have at least numpy + cython installed, use them
    # they're dropped in cwd as egg-dirs however. let's discover those first
    from pkg_resources import require
    require("Cython")
    require("numpy")
    _numpy_monkey()
    import Cython.Distutils
    cmdclass = {'build_ext': Cython.Distutils.build_ext}
    from numpy import get_include
    # Cython extension modules; numpy headers are needed for compilation.
    ext_modules = [Extension("pymor.tools.relations", ["src/pymor/tools/relations.pyx"], include_dirs=[get_include()]),
                   Extension("pymor.tools.inplace", ["src/pymor/tools/inplace.pyx"], include_dirs=[get_include()])]
    kwargs['cmdclass'] = cmdclass
    kwargs['ext_modules'] = ext_modules

    # lastly we'll need to tweak matplotlibs config dir or else
    # installing it from setup will result in a SandboxViolation
    import os
    os.environ['MPLCONFIGDIR'] = "."

    from numpy.distutils.core import setup
    return setup(**kwargs)
Example #28
0
def deploy():
    """Deploy to production.

    Requires root; asks for two confirmations, pulls the latest code,
    installs Python requirements only when unsatisfied, runs collectstatic
    and migrations, then restarts gunicorn.
    """
    _require_root()

    # Two explicit confirmations: migrations are applied and irreversible.
    if not confirm("This will apply any available migrations to the database. Has the database been backed up?"):
        abort("Aborted.")
    if not confirm("Are you sure you want to deploy?"):
        abort("Aborted.")

    with lcd(PRODUCTION_DOCUMENT_ROOT):
        with shell_env(PRODUCTION="TRUE"):
            local("git pull")
            # Only reinstall requirements when something is missing/outdated.
            with open("requirements.txt", "r") as req_file:
                requirements = req_file.read().strip().split()
                try:
                    pkg_resources.require(requirements)
                except:
                    # Any resolution problem: fall back to pip install.
                    local("pip install -U -r requirements.txt")
                else:
                    puts("Python requirements already satisfied.")
            with prefix("source /usr/local/virtualenvs/ion/bin/activate"):
                local("./manage.py collectstatic --noinput")
                local("./manage.py migrate")
            restart_production_gunicorn(True)

    puts("Deploy complete.")
Example #29
0
def cli():
    """Command-line entry point: read stdin or files and paste them raw."""
    parser = argparse.ArgumentParser(
        prog='pasteraw',
        description='Pipe stdin or files to a raw pastebin.')
    parser.add_argument(
        'files', metavar='file', nargs='*',
        help='one or more file names')
    parser.add_argument(
        '--endpoint', default=ENDPOINT,
        help=argparse.SUPPRESS)
    parser.add_argument(
        '--max-content-length', type=int, default=1048576,
        help=argparse.SUPPRESS)
    parser.add_argument(
        '--debug', action='store_true',
        help=argparse.SUPPRESS)
    parser.add_argument(
        '--version', action='store_true',
        help='show version number and exit')
    args = parser.parse_args()

    if args.debug:
        LOG.setLevel(logging.DEBUG)
    else:
        LOG.setLevel(logging.WARN)

    if args.version:
        # Prints e.g. "pasteraw 1.2.3" from the installed metadata.
        print pkg_resources.require('pasteraw')[0]
        raise SystemExit()

    main(args)
Example #30
0
def setuptools_is_new_enough(required_version):
    """Return True if setuptools is already installed and has a version
    number >= required_version."""
    if 'pkg_resources' in sys.modules:
        # pkg_resources was already imported elsewhere: just query it and
        # leave it loaded.
        import pkg_resources
        try:
            pkg_resources.require('setuptools >= %s' % (required_version,))
        except pkg_resources.VersionConflict:
            # An insufficiently new version is installed.
            return False
        else:
            return True
    else:
        try:
            import pkg_resources
        except ImportError:
            # Okay it is not installed.
            return False
        else:
            # We imported pkg_resources only to probe it, so undo the
            # import completely before returning either way.
            try:
                pkg_resources.require('setuptools >= %s' % (required_version,))
            except pkg_resources.VersionConflict:
                # An insufficiently new version is installed.
                pkg_resources.__dict__.clear() # "If you want to be absolutely sure... before deleting it." --said PJE on IRC
                del sys.modules['pkg_resources']
                return False
            else:
                pkg_resources.__dict__.clear() # "If you want to be absolutely sure... before deleting it." --said PJE on IRC
                del sys.modules['pkg_resources']
                return True
Example #31
0
#!/usr/bin/env python
"""
    Setup file for hcrystalball.
    Use setup.cfg to configure your project.

    This file was generated with PyScaffold 3.1.
    PyScaffold helps you to put up the scaffold of your new Python project.
    Learn more under: https://pyscaffold.org/
"""
import sys

from pkg_resources import require, VersionConflict
from setuptools import setup

# PyScaffold needs a reasonably recent setuptools; fail fast with a clear
# message instead of a confusing error later during the build.
try:
    require('setuptools>=38.3')
except VersionConflict:
    print("Error: version of setuptools is too old (<38.3)!")
    sys.exit(1)

if __name__ == "__main__":
    # All project metadata lives in setup.cfg; use_pyscaffold enables
    # PyScaffold's version handling and build integration.
    setup(use_pyscaffold=True)
Example #32
0
# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'enoslib'
copyright = u'2017, Ronan-Alexandre Cherrueau, Matthieu Simonin'
author = u'Ronan-Alexandre Cherrueau, Matthieu Simonin'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# Queried from the installed distribution metadata so the documented version
# always matches the installed package (the project must be pip-installed
# when building the docs, otherwise require() raises DistributionNotFound).
import pkg_resources
version = pkg_resources.require(project)[0].version
# The full version, including alpha/beta/rc tags.
release = version

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
Example #33
0
    def download(self) -> bool:
        """Download songs from YouTube or SoundCloud and post-process them.

        Workflow: validate the parameters, retrieve the playlist, download
        it, update the audio metadata, clean up the download directory and
        (macOS only) optionally open that directory.

        :return bool result: Result of process, used by unit test.
            NOTE(review): validation/processing failures call ``exit()``
            rather than returning False, so False is never actually
            returned from this method.
        """
        print()
        # Also print a blank line when the interpreter exits, to keep the
        # console output separated.
        atexit.register(print)
        """ Set log level """

        if self.verbose:
            logger.setLevel(logging.DEBUG)
        else:
            logger.setLevel(logging.INFO)
        """ Print version """

        logger.info(pkg_resources.require("music_dl")[0])
        """ Validate parameters """

        logger.info('Validating parameters...')

        try:
            # Validate download url
            url_parsed = urlparse(self.download_url)
            if not url_parsed.scheme.startswith('http'):
                raise DirectoryException(
                    'Invalid URL. URL must start with http*. Input value is {}'
                    .format(self.download_url))
            # tldextract splits the registered domain out of the URL; only
            # youtube.* and soundcloud.* hosts are supported.
            tld_parsed = tldextract.extract(self.download_url)
            if not (tld_parsed.domain in ['youtube', 'soundcloud']):
                raise DirectoryException(
                    'Invalid URL. Music Downloader supports only YouTube and SoundCloud. Input value is {}'
                    .format(self.download_url))
            # Validate download directory
            if not is_path_exists_or_creatable(self.working_dir):
                raise DirectoryException(
                    'Invalid directory. Please specify valid download directory. Input value is {}'
                    .format(self.working_dir))

        except DirectoryException as e:
            logger.error(e.message)
            logger.fatal('Aborted.')
            exit()

        # Validate playlist configuration
        try:
            self.playlist.validate()

        except PlaylistParameterException as e:
            logger.error(e.message)
            logger.fatal('Aborted.')
            exit()

        logger.info('Done.')
        """ Retrieve playlist """

        download_dir = None
        try:
            # Resolves the URL into a concrete playlist and returns the
            # directory songs will be downloaded into (presumably under
            # working_dir — confirm against Playlist.preprocess).
            download_dir = self.playlist.preprocess(self.download_url,
                                                    self.working_dir)

        except PlaylistPreprocessException as e:
            logger.error(e.message)
            logger.error(e.data)
            logger.fatal('Aborted.')
            exit()
        """ Download playlist """

        is_downloaded = False
        try:
            is_downloaded = self.playlist.download()

        except PlaylistPreprocessException as e:
            logger.error(e.message)
            logger.error(e.data)
            logger.fatal('Aborted.')
            exit()
        """ Update metadata """

        if is_downloaded:
            # Write metadata tags for the downloaded track(s).
            self.metadata_editor.update(
                download_dir=self.playlist.download_dir,
                pl_data=self.playlist.downloaded_playlist_data,
                is_playlist=self.playlist.is_playlist,
            )
        """ Cleanup download directory """

        self.playlist.cleanup()
        """ Print completion message """

        logger.info('All process has done.')
        # NOTE(review): the ANSI color code is never reset afterwards, so
        # subsequent console output may remain light-cyan.
        logger.info('Now you can find downloaded songs at {}'.format(
            colorama.Fore.LIGHTCYAN_EX + download_dir))
        """ Open download directory """

        # 'open' is a macOS command; this branch is a no-op elsewhere.
        if self.open_dir and platform.system().lower() == 'darwin':
            subprocess.check_output(['open', download_dir],
                                    stderr=subprocess.STDOUT)

        return True
Example #34
0
def main(argsl=None):  # type: (List[str]) -> int
    """Validate a Salad schema and, optionally, a document against it.

    Depending on flags, may instead print one of the derived artifacts
    (JSON-LD context, RDFS graph, Avro schema, preprocessed document,
    node index, metadata).

    :param argsl: argument list; defaults to ``sys.argv[1:]``.
    :return: 0 on success, 1 on any load or validation failure.
    """
    if argsl is None:
        argsl = sys.argv[1:]

    parser = argparse.ArgumentParser()
    parser.add_argument("--rdf-serializer",
                        help="Output RDF serialization format used by --print-rdf (one of turtle (default), n3, nt, xml)",
                        default="turtle")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--print-jsonld-context", action="store_true",
                         help="Print JSON-LD context for schema")
    exgroup.add_argument(
        "--print-rdfs", action="store_true", help="Print RDF schema")
    exgroup.add_argument("--print-avro", action="store_true",
                         help="Print Avro schema")

    exgroup.add_argument("--print-rdf", action="store_true",
                         help="Print corresponding RDF graph for document")
    exgroup.add_argument("--print-pre", action="store_true",
                         help="Print document after preprocessing")
    exgroup.add_argument(
        "--print-index", action="store_true", help="Print node index")
    exgroup.add_argument("--print-metadata",
                         action="store_true", help="Print document metadata")
    exgroup.add_argument("--version", action="store_true",
                         help="Print version")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--strict", action="store_true", help="Strict validation (unrecognized or out of place fields are error)",
                         default=True, dest="strict")
    exgroup.add_argument("--non-strict", action="store_false", help="Lenient validation (ignore unrecognized fields)",
                         default=True, dest="strict")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--verbose", action="store_true",
                         help="Default logging")
    exgroup.add_argument("--quiet", action="store_true",
                         help="Only print warnings and errors.")
    exgroup.add_argument("--debug", action="store_true",
                         help="Print even more logging")

    parser.add_argument("schema", type=str)
    parser.add_argument("document", type=str, nargs="?", default=None)

    args = parser.parse_args(argsl)

    if args.quiet:
        _logger.setLevel(logging.WARN)
    if args.debug:
        _logger.setLevel(logging.DEBUG)

    pkg = pkg_resources.require("schema_salad")
    if pkg:
        if args.version:
            print("%s %s" % (sys.argv[0], pkg[0].version))
            return 0
        else:
            _logger.info("%s %s", sys.argv[0], pkg[0].version)

    # Get the metaschema to validate the schema
    metaschema_names, metaschema_doc, metaschema_loader = schema.get_metaschema()

    # Load schema document and resolve refs

    schema_uri = args.schema
    if not urlparse.urlparse(schema_uri)[0]:
        schema_uri = "file://" + os.path.abspath(schema_uri)
    schema_raw_doc = metaschema_loader.fetch(schema_uri)

    try:
        schema_doc, schema_metadata = metaschema_loader.resolve_all(
            schema_raw_doc, schema_uri)
    except (validate.ValidationException) as e:
        _logger.error("Schema `%s` failed link checking:\n%s",
                      args.schema, e, exc_info=(e if args.debug else False))
        _logger.debug("Index is %s", metaschema_loader.idx.keys())
        _logger.debug("Vocabulary is %s", metaschema_loader.vocab.keys())
        return 1

    # Optionally print the schema after ref resolution
    if not args.document and args.print_pre:
        print(json.dumps(schema_doc, indent=4))
        return 0

    if not args.document and args.print_index:
        print(json.dumps(metaschema_loader.idx.keys(), indent=4))
        return 0

    # Validate the schema document against the metaschema
    try:
        schema.validate_doc(metaschema_names, schema_doc,
                            metaschema_loader, args.strict)
    except validate.ValidationException as e:
        _logger.error("While validating schema `%s`:\n%s" %
                      (args.schema, str(e)))
        return 1

    # Get the json-ld context and RDFS representation from the schema
    metactx = {}  # type: Dict[str, str]
    if isinstance(schema_raw_doc, dict):
        metactx = schema_raw_doc.get("$namespaces", {})
        if "$base" in schema_raw_doc:
            metactx["@base"] = schema_raw_doc["$base"]
    (schema_ctx, rdfs) = jsonld_context.salad_to_jsonld_context(schema_doc, metactx)

    # Create the loader that will be used to load the target document.
    document_loader = Loader(schema_ctx)

    # Make the Avro validation that will be used to validate the target
    # document
    (avsc_names, avsc_obj) = schema.make_avro_schema(schema_doc, document_loader)

    if isinstance(avsc_names, Exception):
        _logger.error("Schema `%s` error:\n%s", args.schema,
                      avsc_names, exc_info=(avsc_names if args.debug else False))
        if args.print_avro:
            print(json.dumps(avsc_obj, indent=4))
        return 1

    # Optionally print Avro-compatible schema from schema
    if args.print_avro:
        print(json.dumps(avsc_obj, indent=4))
        return 0

    # Optionally print the json-ld context from the schema
    if args.print_jsonld_context:
        j = {"@context": schema_ctx}
        print(json.dumps(j, indent=4, sort_keys=True))
        return 0

    # Optionally print the RDFS graph from the schema
    if args.print_rdfs:
        print(rdfs.serialize(format=args.rdf_serializer))
        return 0

    if args.print_metadata and not args.document:
        print(json.dumps(schema_metadata, indent=4))
        return 0

    # If no document specified, all done.
    if not args.document:
        print("Schema `%s` is valid" % args.schema)
        return 0

    # Load target document and resolve refs
    try:
        uri = args.document
        if not urlparse.urlparse(uri)[0]:
            # Bare paths have no URL scheme; convert to an absolute file://
            # URI (mirrors the schema_uri handling above).
            # Bug fix: this used to assign to an unused local `doc`, so the
            # raw path was passed to resolve_ref unchanged.
            uri = "file://" + os.path.abspath(uri)
        document, doc_metadata = document_loader.resolve_ref(uri)
    except (validate.ValidationException, RuntimeError) as e:
        _logger.error("Document `%s` failed validation:\n%s",
                      args.document, e, exc_info=(e if args.debug else False))
        return 1

    # Optionally print the document after ref resolution
    if args.print_pre:
        print(json.dumps(document, indent=4))
        return 0

    if args.print_index:
        print(json.dumps(document_loader.idx.keys(), indent=4))
        return 0

    # Validate the schema document against the metaschema
    try:
        schema.validate_doc(avsc_names, document,
                            document_loader, args.strict)
    except validate.ValidationException as e:
        _logger.error("While validating document `%s`:\n%s" %
                      (args.document, str(e)))
        return 1

    # Optionally convert the document to RDF
    if args.print_rdf:
        printrdf(args.document, document, schema_ctx, args.rdf_serializer)
        return 0

    if args.print_metadata:
        print(json.dumps(doc_metadata, indent=4))
        return 0

    print("Document `%s` is valid" % args.document)

    return 0
Example #35
0
import RFSignalGenerators
import BPMDevice
from pkg_resources import require

require("numpy")
require("cothread")
require("matplotlib")
import numpy as np
import matplotlib.pyplot as plt
import time


def Template(RF,
             BPM,
             argument1=1,
             argument2=2,
             argument3=3,
             argument4=4,
             report=None):
    """One line introduction to the test

    A more detailed introduction to the test, this can be over multiple lines

    Args:
        RF (RFSignalGenerator Obj): RF interface object.
        BPM (BPMDevice Obj): BPM interface object.
        argument1 (argument type): detail of what the argument is and what it's used for.
        argument2 (argument type): detail of what the argument is and what it's used for.
        argument3 (argument type): detail of what the argument is and what it's used for.
        argument4 (argument type): detail of what the argument is and what it's used for.
        report (LaTeX Report Obj):
Example #36
0
from numpy.random import seed as set_seed

from .cococommands import *  # outdated
from . import config
from . import archiving
from . import rungeneric
from . import genericsettings

from .rungeneric import main

import pkg_resources

__all__ = [# 'main',  # import nothing with "from cocopp import *"
           ]

# Taken from the installed distribution metadata, so ``cocopp.__version__``
# always matches what pip installed (requires the package to be installed).
__version__ = pkg_resources.require('cocopp')[0].version

# Convenience module-level handles into the online data archives; the real
# implementation lives in the ``archiving`` module.
archives = archiving.KnownArchives()
data_archive = archives.all  # only for historical reasons
bbob = archives.bbob
bbob_noisy = archives.bbob_noisy
bbob_biobj = archives.bbob_biobj
# data_archive = 'use `archives.all` instead'
# bbob = 'use `archives.bbob` instead'
# bbob_noisy = 'use `archives.bbob_noisy` instead'
# bbob_biobj = 'use `archives.bbob_biobj` instead'

class Interface:
    """collection of the most user-relevant modules, methods and data.

    `archives`: online data archives of type `KnownArchives`
Example #37
0
def main():
    """Zegami command line interface.

    Builds the argument parser (one subcommand per action in
    ``option_mapper`` plus ``login``), authenticates, then dispatches to
    the handler registered for the chosen action/resource pair.

    Exits with status 1 on config validation errors, missing arguments,
    or (when not verbose) any unhandled handler exception.
    """
    version = pkg_resources.require('zegami-cli')[0].version
    description = dedent(r'''
         ____                      _
        /_  / ___ ___ ____ ___ _  (_)
         / /_/ -_) _ `/ _ `/  ' \/ /
        /___/\__/\_, /\_,_/_/_/_/_/
                /___/  v{}

        Visual data exploration.

    A command line interface for managing Zegami.
    '''.format(version))

    parser = ArgumentParser(
        formatter_class=RawDescriptionHelpFormatter,
        description=description,
    )

    # top level arguments
    parser.add_argument(
        '--version',
        action='version',
        version='%(prog)s {}'.format(version),
    )

    # Maps each action to its help text and to the handler callable for
    # every resource type that supports it.
    option_mapper = {
        'delete': {
            'help': 'Delete a resource',
            'resources': {
                'collections': collections.delete,
                'dataset': datasets.delete,
                'imageset': imagesets.delete,
            }
        },
        'create': {
            'help': 'Create a resource',
            'resources': {
                'collections': collections.create,
            }
        },
        'list': {
            'help': 'Lists entries of a resource',
            'resources': {
                'projects': projects.enumerate,
            }
        },
        'get': {
            'help': 'Get a resource',
            'resources': {
                'collections': collections.get,
                'dataset': datasets.get,
                'imageset': imagesets.get,
            }
        },
        'publish': {
            'help': 'Publish a resource',
            'resources': {
                'collection': collections.publish,
            }
        },
        'update': {
            'help': 'Update a resource',
            'resources': {
                'collections': collections.update,
                'dataset': datasets.update,
                'imageset': imagesets.update,
            }
        },
    }

    # option mapper parser
    subparsers = parser.add_subparsers()
    for action in option_mapper:
        action_parser = subparsers.add_parser(
            action,
            help=option_mapper[action]['help'],
        )
        # set the action type so we can work out what was chosen
        action_parser.set_defaults(action=action)
        action_parser.add_argument(
            'resource',
            choices=option_mapper[action]['resources'].keys(),
            help='The name of the resource type.')
        # 'create' is the only action that does not target an existing
        # resource, so it takes no id argument.
        if action != "create":
            action_parser.add_argument(
                'id',
                default=None,
                nargs="?",
                help='Resource identifier.',
            )
        action_parser.add_argument(
            '-c',
            '--config',
            help='Path to command configuration yaml.',
        )
        action_parser.add_argument(
            '-p',
            '--project',
            help='The id of the project.',
        )
        _add_standard_args(action_parser)

    # login parser
    login_parser = subparsers.add_parser(
        'login',
        help='Authenticate against the API and store a long lived token',
    )
    login_parser.set_defaults(action='login')
    _add_standard_args(login_parser)

    try:
        args = parser.parse_args()
    except jsonschema.exceptions.ValidationError:
        sys.exit(1)

    # No arguments at all: show help rather than failing on a missing
    # subcommand.
    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)

    logger = log.Logger(args.verbose)
    token = auth.get_token(args)
    session = http.make_session(args.url, token)

    if args.action == 'login':
        auth.login(
            logger,
            session,
            args,
        )
        return

    try:
        option_mapper[args.action]['resources'][args.resource](
            logger,
            session,
            args,
        )
    except Exception as e:
        # unhandled exceptions
        if args.verbose:
            # Bare raise re-raises with the original traceback intact
            # (fix: `raise e` would restart the traceback from here).
            raise
        logger.error('Unhandled exception: {}'.format(e))
        sys.exit(1)
Example #38
0
def get_version():
    """Return the version string of the installed ``Kotti`` distribution."""
    dist = pkg_resources.require("Kotti")[0]
    return dist.version
Example #39
0
def main(argsl=None):  # type: (Optional[List[str]]) -> int
    """Validate a Salad schema and, optionally, a document against it.

    Depending on flags, may instead emit one of the derived artifacts
    (JSON-LD context, RDFS graph, Avro schema, HTML docs, generated code,
    inheritance/fieldref graphs, preprocessed document, node index,
    metadata).

    :param argsl: argument list; defaults to ``sys.argv[1:]``.
    :return: 0 on success, 1 on any load or validation failure.
    """
    if argsl is None:
        argsl = sys.argv[1:]

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--rdf-serializer",
        help=
        "Output RDF serialization format used by --print-rdf (one of turtle (default), n3, nt, xml)",
        default="turtle")

    parser.add_argument("--skip-schemas",
                        action="store_true",
                        default=False,
                        help="If specified, ignore $schemas sections.")
    parser.add_argument("--strict-foreign-properties",
                        action="store_true",
                        help="Strict checking of foreign properties",
                        default=False)

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--print-jsonld-context",
                         action="store_true",
                         help="Print JSON-LD context for schema")
    exgroup.add_argument("--print-rdfs",
                         action="store_true",
                         help="Print RDF schema")
    exgroup.add_argument("--print-avro",
                         action="store_true",
                         help="Print Avro schema")

    exgroup.add_argument("--print-rdf",
                         action="store_true",
                         help="Print corresponding RDF graph for document")
    exgroup.add_argument("--print-pre",
                         action="store_true",
                         help="Print document after preprocessing")
    exgroup.add_argument("--print-index",
                         action="store_true",
                         help="Print node index")
    exgroup.add_argument("--print-metadata",
                         action="store_true",
                         help="Print document metadata")
    exgroup.add_argument("--print-inheritance-dot",
                         action="store_true",
                         help="Print graphviz file of inheritance")
    exgroup.add_argument("--print-fieldrefs-dot",
                         action="store_true",
                         help="Print graphviz file of field refs")

    exgroup.add_argument(
        "--codegen",
        type=str,
        metavar="language",
        help="Generate classes in target language, currently supported: python"
    )

    exgroup.add_argument("--print-oneline",
                         action="store_true",
                         help="Print each error message in oneline")

    exgroup.add_argument("--print-doc",
                         action="store_true",
                         help="Print HTML schema documentation page")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument(
        "--strict",
        action="store_true",
        help=
        "Strict validation (unrecognized or out of place fields are error)",
        default=True,
        dest="strict")
    exgroup.add_argument(
        "--non-strict",
        action="store_false",
        help="Lenient validation (ignore unrecognized fields)",
        default=True,
        dest="strict")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--verbose",
                         action="store_true",
                         help="Default logging")
    exgroup.add_argument("--quiet",
                         action="store_true",
                         help="Only print warnings and errors.")
    exgroup.add_argument("--debug",
                         action="store_true",
                         help="Print even more logging")

    parser.add_argument(
        '--only',
        action='append',
        help="Use with --print-doc, document only listed types")
    parser.add_argument(
        '--redirect',
        action='append',
        help="Use with --print-doc, override default link for type")
    parser.add_argument(
        '--brand',
        help="Use with --print-doc, set the 'brand' text in nav bar")
    parser.add_argument(
        '--brandlink',
        help="Use with --print-doc, set the link for 'brand' in nav bar")
    parser.add_argument(
        '--primtype',
        default="#PrimitiveType",
        help=
        "Use with --print-doc, link to use for primitive types (string, int etc)"
    )

    parser.add_argument("schema", type=str, nargs="?", default=None)
    parser.add_argument("document", type=str, nargs="?", default=None)
    parser.add_argument("--version",
                        "-v",
                        action="store_true",
                        help="Print version",
                        default=None)

    args = parser.parse_args(argsl)

    if args.version is None and args.schema is None:
        print('%s: error: too few arguments' % sys.argv[0])
        return 1

    if args.quiet:
        _logger.setLevel(logging.WARN)
    if args.debug:
        _logger.setLevel(logging.DEBUG)

    pkg = pkg_resources.require("schema_salad")
    if pkg:
        if args.version:
            print("%s Current version: %s" % (sys.argv[0], pkg[0].version))
            return 0
        else:
            _logger.info("%s Current version: %s", sys.argv[0], pkg[0].version)

    # Get the metaschema to validate the schema
    metaschema_names, metaschema_doc, metaschema_loader = schema.get_metaschema(
    )

    # Load schema document and resolve refs

    schema_uri = args.schema
    if not (urllib.parse.urlparse(schema_uri)[0]
            and urllib.parse.urlparse(schema_uri)[0]
            in [u'http', u'https', u'file']):
        schema_uri = file_uri(os.path.abspath(schema_uri))
    schema_raw_doc = metaschema_loader.fetch(schema_uri)

    try:
        schema_doc, schema_metadata = metaschema_loader.resolve_all(
            schema_raw_doc, schema_uri)
    except (validate.ValidationException) as e:
        _logger.error("Schema `%s` failed link checking:\n%s",
                      args.schema,
                      Text(e),
                      exc_info=(True if args.debug else False))
        _logger.debug("Index is %s", list(metaschema_loader.idx.keys()))
        _logger.debug("Vocabulary is %s", list(metaschema_loader.vocab.keys()))
        return 1
    except (RuntimeError) as e:
        _logger.error("Schema `%s` read error:\n%s",
                      args.schema,
                      Text(e),
                      exc_info=(True if args.debug else False))
        return 1

    if args.print_doc:
        makedoc(args)
        return 0

    # Optionally print the schema after ref resolution
    if not args.document and args.print_pre:
        print(json_dumps(schema_doc, indent=4))
        return 0

    if not args.document and args.print_index:
        print(json_dumps(list(metaschema_loader.idx.keys()), indent=4))
        return 0

    # Validate the schema document against the metaschema
    try:
        schema.validate_doc(metaschema_names, schema_doc, metaschema_loader,
                            args.strict)
    except validate.ValidationException as e:
        _logger.error("While validating schema `%s`:\n%s", args.schema,
                      Text(e))
        return 1

    # Get the json-ld context and RDFS representation from the schema
    metactx = schema.collect_namespaces(schema_metadata)
    if "$base" in schema_metadata:
        metactx["@base"] = schema_metadata["$base"]
    if isinstance(schema_doc, CommentedSeq):
        (schema_ctx,
         rdfs) = jsonld_context.salad_to_jsonld_context(schema_doc, metactx)
    else:
        raise Exception("Expected a CommentedSeq, got {}: {}.".format(
            type(schema_doc), schema_doc))

    # Create the loader that will be used to load the target document.
    document_loader = Loader(schema_ctx, skip_schemas=args.skip_schemas)

    if args.codegen:
        codegen.codegen(args.codegen, cast(List[Dict[Text, Any]], schema_doc),
                        schema_metadata, document_loader)
        return 0

    # Make the Avro validation that will be used to validate the target
    # document
    if isinstance(schema_doc, MutableSequence):
        avsc_obj = schema.make_avro(schema_doc, document_loader)
        try:
            avsc_names = schema.make_avro_schema_from_avro(avsc_obj)
        except SchemaParseException as err:
            _logger.error("Schema `%s` error:\n%s",
                          args.schema,
                          Text(err),
                          exc_info=((type(err), err,
                                     None) if args.debug else None))
            if args.print_avro:
                print(json_dumps(avsc_obj, indent=4))
            return 1
    else:
        _logger.error("Schema `%s` must be a list.", args.schema)
        return 1

    # Optionally print Avro-compatible schema from schema
    if args.print_avro:
        print(json_dumps(avsc_obj, indent=4))
        return 0

    # Optionally print the json-ld context from the schema
    if args.print_jsonld_context:
        j = {"@context": schema_ctx}
        print(json_dumps(j, indent=4, sort_keys=True))
        return 0

    # Optionally print the RDFS graph from the schema
    if args.print_rdfs:
        print(rdfs.serialize(format=args.rdf_serializer).decode('utf-8'))
        return 0

    if args.print_metadata and not args.document:
        print(json_dumps(schema_metadata, indent=4))
        return 0

    if args.print_inheritance_dot:
        schema.print_inheritance(schema_doc, sys.stdout)
        return 0

    if args.print_fieldrefs_dot:
        schema.print_fieldrefs(schema_doc, document_loader, sys.stdout)
        return 0

    # If no document specified, all done.
    if not args.document:
        print("Schema `%s` is valid" % args.schema)
        return 0

    # Load target document and resolve refs
    try:
        uri = args.document
        if not urllib.parse.urlparse(uri)[0]:
            # Bare paths have no URL scheme; convert to an absolute file://
            # URI (mirrors the schema_uri handling above).
            # Bug fix: this used to assign to an unused local `doc`, so the
            # raw path was passed to resolve_ref unchanged.
            uri = file_uri(os.path.abspath(uri))
        document, doc_metadata = document_loader.resolve_ref(
            uri, strict_foreign_properties=args.strict_foreign_properties)
    except validate.ValidationException as e:
        msg = strip_dup_lineno(six.text_type(e))
        msg = to_one_line_messages(str(msg)) if args.print_oneline else msg
        _logger.error("Document `%s` failed validation:\n%s",
                      args.document,
                      msg,
                      exc_info=args.debug)
        return 1
    except RuntimeError as e:
        msg = strip_dup_lineno(six.text_type(e))
        msg = reformat_yaml_exception_message(str(msg))
        msg = to_one_line_messages(msg) if args.print_oneline else msg
        _logger.error("Document `%s` failed validation:\n%s",
                      args.document,
                      msg,
                      exc_info=args.debug)
        return 1

    # Optionally print the document after ref resolution
    if args.print_pre:
        print(json_dumps(document, indent=4))
        return 0

    if args.print_index:
        print(json_dumps(list(document_loader.idx.keys()), indent=4))
        return 0

    # Validate the user document against the schema
    try:
        schema.validate_doc(
            avsc_names,
            document,
            document_loader,
            args.strict,
            strict_foreign_properties=args.strict_foreign_properties)
    except validate.ValidationException as e:
        msg = to_one_line_messages(str(e)) if args.print_oneline else str(e)
        _logger.error("While validating document `%s`:\n%s" %
                      (args.document, msg))
        return 1

    # Optionally convert the document to RDF
    if args.print_rdf:
        if isinstance(document, (Mapping, MutableSequence)):
            printrdf(args.document, document, schema_ctx, args.rdf_serializer)
            return 0
        else:
            print("Document must be a dictionary or list.")
            return 1

    if args.print_metadata:
        print(json_dumps(doc_metadata, indent=4))
        return 0

    print("Document `%s` is valid" % args.document)

    return 0
#
# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
# All rights reserved.
# This component and the accompanying materials are made available
# under the terms of "Eclipse Public License v1.0"
# which accompanies this distribution, and is available
# at the URL "http://www.eclipse.org/legal/epl-v10.html".
#
# Initial Contributors:
# Nokia Corporation - initial contribution.
#
# Contributors:
#
# Description:
#

__version__ = 0.1

import pkg_resources
import sys, os

# Prefer the installed "Cone" distribution; when it is not installed
# (e.g. running from a source checkout), fall back to importing from the
# source tree by extending sys.path with this file's directory and its
# two parent directories.
try:
    pkg_resources.require("Cone")
except pkg_resources.DistributionNotFound:
    ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
    sys.path.append(ROOT_PATH)
    sys.path.append(os.path.join(ROOT_PATH, '..'))
    sys.path.append(os.path.join(ROOT_PATH, '../..'))
Example #41
0
    def search(self, search_params):
        """Run a Careerjet job search and return the decoded JSON result.

        search_params -- dict of search parameters.

        Mandatory keys:
            affid      : Careerjet affiliate identifier
                         (see http://www.careerjet.co.uk/partners/)
            user_ip    : IP address of the end user viewing the results
            user_agent : user agent of the end user's browser
            url        : URL of the page that will display the results
                         (must use the http or https scheme)

        Optional keys (all have server-side defaults):
            keywords       : keywords matching title, content or company
                             name (e.g. 'perl developer', 'ibm')
            location       : location of requested job postings
                             (e.g. 'London', 'Yorkshire', 'France')
            sort           : 'relevance' (default), 'date' or 'salary'
            start_num      : 1-based position within the result space
            pagesize       : number of returned results (default 20,
                             maximum 100)
            page           : page number of returned postings; overrides
                             start_num when set (minimum 1)
            contracttype   : 'p' permanent, 'c' contract, 't' temporary,
                             'i' training, 'v' voluntary (default: all)
            contractperiod : 'f' full time, 'p' part time (default: all)
            locale_code    : defaults to self.locale_code

        Raises Exception for unknown, missing or invalid parameters, and
        requests.HTTPError for 4XX/5XX API responses.
        """
        for field in search_params:
            if field not in Constants.ALLOWED_FIELDS:
                raise Exception('Unknown param key \'' + field + '\'')

        for field in Constants.MANDATORY_FIELDS:
            if field not in search_params or not search_params[field]:
                raise Exception('Mandatory param key \'' + field +
                                '\' missing')

        if 'locale_code' not in search_params:
            search_params['locale_code'] = self.locale_code

        if search_params['locale_code'] not in Constants.LOCALES:
            raise Exception('Locale ' + search_params['locale_code'] +
                            ' not supported')

        # Keep the raw URL in a local: it is popped out of search_params
        # (it must not be sent as a query parameter), so it cannot be
        # read back from the dict below.  The original code did exactly
        # that and raised KeyError instead of the intended message.
        url = search_params.pop('url')
        referer_uri = urlparse(url)
        if referer_uri.scheme not in ('http', 'https'):
            raise Exception('Invalid param url \'' + url + '\'')

        # Identify this client library (and its version) to the API.
        user_agent = 'careerjet-api-client-v' + pkg_resources.require(
            "careerjet_api"
        )[0].version + '-python-v' + platform.python_version()

        # The previous try/except blocks around the request and the JSON
        # decode only re-raised the caught exception, so they are
        # dropped; errors now propagate with their original traceback.
        response = requests.get(Constants.API_URL + '/search',
                                headers={
                                    'user-agent': user_agent,
                                    'referer': referer_uri.geturl()
                                },
                                params=search_params)

        # Raise requests.HTTPError for 4XX client / 5XX server errors.
        response.raise_for_status()

        return json.loads(response.text)
Example #42
0
# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0

import pkg_resources

# Package version string, read from the installed "arvados-cwl-runner"
# distribution's metadata (raises pkg_resources.DistributionNotFound
# when the package is not installed).
__version__ = pkg_resources.require('arvados-cwl-runner')[0].version
Example #43
0
import pkg_resources
# Package version string, read from the installed "vtem" distribution's
# metadata (raises DistributionNotFound when not installed).
__version__ = pkg_resources.require('vtem')[0].version
Example #44
0
def main():
    """Winnaker entry point.

    Parses the command-line flags, configures console+file logging,
    optionally registers a screenshot e-mail on exit, then drives a
    Spinnaker session: login, pipeline lookup, last-build inspection
    and (optionally) a manual execution.
    """
    print("""
____    __    ____  __  .__   __. .__   __.      ___       __  ___  _______ .______
\   \  /  \  /   / |  | |  \ |  | |  \ |  |     /   \     |  |/  / |   ____||   _  \\
 \   \/    \/   /  |  | |   \|  | |   \|  |    /  ^  \    |  '  /  |  |__   |  |_)  |
  \            /   |  | |  . `  | |  . `  |   /  /_\  \   |    <   |   __|  |      /
   \    /\    /    |  | |  |\   | |  |\   |  /  _____  \  |  .  \  |  |____ |  |\  \----.
    \__/  \__/     |__| |__| \__| |__| \__| /__/     \__\ |__|\__\ |_______|| _| `._____|

    """)
    parser = argparse.ArgumentParser()
    parser.add_argument("-s",
                        "--start",
                        help="starts manual execution of the pipline",
                        action="store_true")
    parser.add_argument("-fb",
                        "--forcebake",
                        help="force bake, to be used wth --start ",
                        action="store_true")
    parser.add_argument("-a",
                        "--app",
                        type=str,
                        help="the name of application to look for",
                        default=cfg_app_name)
    # NOTE(review): this default is evaluated eagerly, so a missing
    # WINNAKER_PIPELINE_NAME env var raises KeyError even when -p is
    # passed explicitly — confirm whether that is intended.
    parser.add_argument("-p",
                        "--pipeline",
                        type=str,
                        help="the name of pipline to test",
                        default=os.environ["WINNAKER_PIPELINE_NAME"])
    parser.add_argument("-nl",
                        "--nologin",
                        help="will not attempt to login",
                        action="store_true")
    parser.add_argument(
        "-nlb",
        "--nolastbuild",
        help="will not attempt to check last build status or stages",
        action="store_true")
    parser.add_argument("-hl",
                        "--headless",
                        help="will run in an xfvb display ",
                        action="store_true")
    parser.add_argument("-v",
                        "--verbose",
                        help="print more logs, DEBUG level",
                        action="store_true")
    args = parser.parse_args()

    # Logging setup: DEBUG when -v is given, INFO otherwise; log both
    # to a file under cfg_output_files_path and to stdout.
    if args.verbose:
        log_level = logging.DEBUG
    else:
        log_level = logging.INFO
    logFormatter = logging.Formatter(
        "%(asctime)s [%(levelname)s]  %(message)s")
    rootLogger = logging.getLogger()
    rootLogger.setLevel(log_level)

    # BUGFIX: the output directory must exist *before* the FileHandler
    # below opens its log file inside it.  The original code only
    # created the directory afterwards, so a fresh install crashed here.
    if not os.path.exists(cfg_output_files_path):
        os.makedirs(cfg_output_files_path)

    fileHandler = logging.FileHandler(
        join(cfg_output_files_path, "winnaker.log"))
    fileHandler.setFormatter(logFormatter)
    rootLogger.addHandler(fileHandler)

    consoleHandler = logging.StreamHandler(sys.stdout)
    consoleHandler.setFormatter(logFormatter)
    rootLogger.addHandler(consoleHandler)

    version = pkg_resources.require("winnaker")[0].version
    logging.info("Winnaker Version: {}".format(version))
    logging.info("Current Config: {}".format(args))

    # When SMTP settings are configured, e-mail the screenshots of this
    # run after the process exits (whatever the outcome).
    if cfg_email_smtp and cfg_email_to and cfg_email_from:
        atexit.register(
            send_mail,
            cfg_email_from,
            cfg_email_to,
            "Winnaker Screenshots " + str(datetime.utcnow()),
            "Here are the screenshots of the spinnaker's last run at " +
            str(datetime.utcnow()) + " UTC Time",
            server=cfg_email_smtp)

    # Headless mode: run the browser inside a virtual X display.
    if args.headless:
        logging.debug("Starting virtual display")
        from pyvirtualdisplay import Display
        display = Display(visible=0, size=(2560, 1440))
        display.start()
        logging.debug("Started virtual display")

    s = Spinnaker()
    if not args.nologin:
        logging.debug("Starting login")
        s.login()
    s.get_pipeline(args.app, args.pipeline)
    if not args.nolastbuild:
        logging.info("- Last build status: {}".format(
            s.get_last_build().status.encode('utf-8')))
        logging.info("- Screenshot Stages")
        logging.info("- Current working directory: {}".format(os.getcwd()))
        s.get_stages()

    if args.start:
        logging.debug("Going into start block")
        s.start_manual_execution(force_bake=args.forcebake)

    if args.headless:
        logging.debug("Stopping virtualdisplay")
        display.stop()
        logging.debug("virtualdisplay stopped")
Example #45
0
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
u"""Provides unittest classes TestMRA and TestMRADiscontinuousDiscretisation.
"""

__id__ = "$Id$"
__author__ = "$Author$"
__version__ = "$Revision$"
# $Source$

import unittest
import pkg_resources
pkg_resources.require("pyphant")

import numpy
from pyphant.core import DataContainer as DC


class TestEmd5Source(unittest.TestCase):
    """Set up a random FieldContainer and register it with the
    knowledge manager."""

    def setUp(self):
        # Build a sealed 10x10 random field; sealing freezes it and
        # assigns its identifier.
        field = DC.FieldContainer(numpy.random.randn(10, 10))
        field.seal()
        self.V = field
        # Imported locally so merely importing this module does not
        # pull in (and instantiate) the knowledge manager.
        from pyphant.core import KnowledgeManager
        manager = KnowledgeManager.KnowledgeManager.getInstance()
        manager.registerDataContainer(self.V, temporary=True)
Example #46
0
'''
Retrieve the version number of the pipeline from the package
configuration.
'''

# This solution was suggested on Stack Overflow:
# http://stackoverflow.com/questions/2058802/how-can-i-get-the-version-defined-in-setup-py-setuptools-in-my-package

import pkg_resources  # part of setuptools

# Pipeline version string, read from the installed "hiplexpipe"
# distribution's metadata.
version = pkg_resources.require("hiplexpipe")[0].version
Example #47
0
def missing_requirements(specifiers):
    """Yield each requirement specifier that is not satisfied.

    Every specifier is checked with ``pkg_resources.require``; the ones
    for which no installed distribution is found are yielded unchanged.
    """
    for spec in specifiers:
        satisfied = True
        try:
            pkg_resources.require(spec)
        except pkg_resources.DistributionNotFound:
            satisfied = False
        if not satisfied:
            yield spec
Example #48
0
    if pct % 2 == 0:
        mestate.console.write('#')


if __name__ == "__main__":
    multiproc.freeze_support()
    from argparse import ArgumentParser
    parser = ArgumentParser(description=__doc__)
    parser.add_argument("--version", action='store_true', help="show version")
    parser.add_argument("files", metavar="<FILE>", nargs="?")
    args = parser.parse_args()

    if args.version:
        #pkg_resources doesn't work in the windows exe build, so read the version file
        try:
            version = pkg_resources.require("mavproxy")[0].version
        except Exception as e:
            start_script = os.path.join(os.environ['LOCALAPPDATA'], "MAVProxy",
                                        "version.txt")
            f = open(start_script, 'r')
            version = f.readline()
        print("MAVExplorer Version: " + version)
        sys.exit(1)

    mestate = MEState()
    setup_file_menu()

    mestate.rl = rline.rline("MAV> ", mestate)

    #If specified, open the log file
    if args.files is not None and len(args.files) != 0:
Example #49
0
Customization
----------------------------------------------------------------------------

Users should customize this module by editing its 
configuration file.  In this file they should edit the 
``search.searchbar`` and ``search.keywords`` settings to 
match their own personal search preferences.  These 
variables map *what you say* to which *search engines* to 
use.

"""

try:
    import pkg_resources
    pkg_resources.require("dragonfly >= 0.6.5beta1.dev-r76")
except ImportError:
    pass

from dragonfly import *

#---------------------------------------------------------------------------
# Set up this module's configuration.

config = Config("Firefox control")
config.search = Section("Search-related section")
config.search.keywords = Item(
    default={
        "wikipedia": "wikipedia",
    },
    doc="Mapping of spoken-forms to Firefox search-keywords.",
Example #50
0
def get_version(package):
    """Return the version string of the installed *package* distribution."""
    distribution = pkg_resources.require(package)[0]
    return distribution.version
Example #51
0
    async def on_ready():
        """One-time startup hook: load cog packages, print the startup
        banner/feature screen, and report version and update info."""
        # Guard: the ready event can fire again on reconnect; only run
        # the startup sequence once per process.
        if bot.uptime is not None:
            return

        bot.uptime = datetime.datetime.utcnow()
        packages = []

        # Collect packages from the database unless --no-cogs was given,
        # plus any packages requested on the command line.
        if cli_flags.no_cogs is False:
            packages.extend(await bot.db.packages())

        if cli_flags.load_cogs:
            packages.extend(cli_flags.load_cogs)

        if packages:
            # Load permissions first, for security reasons
            try:
                packages.remove("permissions")
            except ValueError:
                pass
            else:
                packages.insert(0, "permissions")

            to_remove = []
            print("Loading packages...")
            for package in packages:
                try:
                    spec = await bot.cog_mgr.find_cog(package)
                    await bot.load_extension(spec)
                except Exception as e:
                    # A failing cog is logged and unregistered rather
                    # than aborting startup.
                    log.exception("Failed to load package {}".format(package),
                                  exc_info=e)
                    await bot.remove_loaded_package(package)
                    to_remove.append(package)
            for package in to_remove:
                packages.remove(package)
            if packages:
                print("Loaded packages: " + ", ".join(packages))

        if bot.rpc_enabled:
            await bot.rpc.initialize()

        guilds = len(bot.guilds)
        users = len(set([m for m in bot.get_all_members()]))

        try:
            data = await bot.application_info()
            invite_url = discord.utils.oauth_url(data.id)
        except:
            # NOTE(review): bare except hides every failure here
            # (including task cancellation); consider narrowing it.
            invite_url = "Could not fetch invite url"

        prefixes = cli_flags.prefix or (await bot.db.prefix())
        lang = await bot.db.locale()
        red_pkg = pkg_resources.get_distribution("Red-DiscordBot")
        dpy_version = discord.__version__

        # Left column of the startup screen.
        INFO = [
            str(bot.user),
            "Prefixes: {}".format(", ".join(prefixes)),
            "Language: {}".format(lang),
            "Red Bot Version: {}".format(red_version),
            "Discord.py Version: {}".format(dpy_version),
            "Shards: {}".format(bot.shard_count),
        ]

        if guilds:
            INFO.extend(
                ("Servers: {}".format(guilds), "Users: {}".format(users)))
        else:
            print("Ready. I'm not in any server yet!")

        INFO.append("{} cogs with {} commands".format(len(bot.cogs),
                                                      len(bot.commands)))

        # Best-effort update check against PyPI; network or HTTP errors
        # are deliberately suppressed.
        with contextlib.suppress(aiohttp.ClientError, discord.HTTPException):
            async with aiohttp.ClientSession() as session:
                async with session.get(
                        "https://pypi.python.org/pypi/red-discordbot/json"
                ) as r:
                    data = await r.json()
            if VersionInfo.from_str(
                    data["info"]["version"]) > red_version_info:
                INFO.append("Outdated version! {} is available "
                            "but you're using {}".format(
                                data["info"]["version"], red_version))
                owner = await bot.get_user_info(bot.owner_id)
                await owner.send(
                    "Your Red instance is out of date! {} is the current "
                    "version, however you are using {}!".format(
                        data["info"]["version"], red_version))
        INFO2 = []

        # Right column: which optional features are enabled/installed.
        sentry = await bot.db.enable_sentry()
        mongo_enabled = storage_type() != "JSON"
        reqs_installed = {"voice": None, "docs": None, "test": None}
        for key in reqs_installed.keys():
            # NOTE(review): _dep_map is a private pkg_resources
            # attribute mapping extras to their requirements.
            reqs = [x.name for x in red_pkg._dep_map[key]]
            try:
                pkg_resources.require(reqs)
            except DistributionNotFound:
                reqs_installed[key] = False
            else:
                reqs_installed[key] = True

        options = (
            ("Error Reporting", sentry),
            ("MongoDB", mongo_enabled),
            ("Voice", reqs_installed["voice"]),
            ("Docs", reqs_installed["docs"]),
            ("Tests", reqs_installed["test"]),
        )

        on_symbol, off_symbol, ascii_border = _get_startup_screen_specs()

        for option, enabled in options:
            enabled = on_symbol if enabled else off_symbol
            INFO2.append("{} {}".format(enabled, option))

        print(Fore.RED + INTRO)
        print(Style.RESET_ALL)
        print(bordered(INFO, INFO2, ascii_border=ascii_border))

        if invite_url:
            print("\nInvite URL: {}\n".format(invite_url))

        bot.color = discord.Colour(await bot.db.color())
        # Optional speedup for fuzzy matching; absence is only logged.
        try:
            import Levenshtein
        except ImportError:
            log.info(
                "python-Levenshtein is not installed, fuzzy string matching will be a bit slower."
            )
Example #52
0
new_path = [os.path.join(os.getcwd(), "lib")]
new_path.extend(sys.path[1:])  # remove scripts/ from the path
sys.path = new_path

from galaxy import eggs
import pkg_resources

import galaxy.model.mapping
from galaxy import model, config

import logging
LOG_FORMAT = '%(asctime)s|%(levelname)-8s|%(message)s'
LOG_DATEFMT = '%Y-%m-%d %H:%M:%S'
LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']

pkg_resources.require("SQLAlchemy >= 0.4")


def main(ini_file):
    global logger

    # Initializing logger
    logger = init_logger(logging)

    conf_parser = ConfigParser.ConfigParser({'here': os.getcwd()})
    logger.info('Reading galaxy.ini')
    conf_parser.read(ini_file)
    ini_config = dict()
    for key, value in conf_parser.items("app:main"):
        ini_config[key] = value
    ini_config = config.Configuration(**ini_config)
Example #53
0
def link_documentation(additional_packages=['python', 'numpy'],
                       requirements_file="../requirements.txt",
                       server=None):
    """Generates a list of documented packages on our documentation server for the packages read from the "requirements.txt" file and the given list of additional packages.

  Parameters:

  additional_packages : [str]
    A list of additional bob packages for which the documentation urls are added.
    By default, 'numpy' is added

  requirements_file : str or file-like
    The file (relative to the documentation directory), where to read the requirements from.
    If ``None``, it will be skipped.

  server : str or None
    The url to the server which provides the documentation.
    If ``None`` (the default), the ``BOB_DOCUMENTATION_SERVER`` environment variable is taken if existent.
    If neither ``server`` is specified, nor a ``BOB_DOCUMENTATION_SERVER`` environment variable is set, the default ``"http://www.idiap.ch/software/bob/docs/bob/%(name)s/%(version)s/"`` is used.

  """
    # NOTE(review): the default argument lists are mutable; callers
    # mutating the returned defaults would leak between calls — confirm
    # no caller does that.
    def smaller_than(v1, v2):
        """Compares scipy/numpy version numbers"""

        c1 = v1.split('.')
        c2 = v2.split('.')[:len(c1)]  #clip to the compared version
        for i in range(len(c2)):
            n1 = c1[i]
            n2 = c2[i]
            try:
                n1 = int(n1)
                n2 = int(n2)
            except ValueError:
                n1 = str(n1)
                n2 = str(n2)
            if n1 < n2: return True
            if n1 > n2: return False
        return False

    # Pick the urllib flavor matching the running interpreter.
    if sys.version_info[0] <= 2:
        import urllib2 as urllib
        from urllib2 import HTTPError, URLError
    else:
        import urllib.request as urllib
        import urllib.error as error
        HTTPError = error.HTTPError
        URLError = error.URLError

    # collect packages are automatically included in the list of indexes
    packages = []
    version_re = re.compile(r'\s*[\<\>=]+\s*')
    if requirements_file is not None:
        if not isinstance(requirements_file, str) or \
            os.path.exists(requirements_file):
            requirements = load_requirements(requirements_file)
            # Strip version constraints: "pkg >= 1.0" -> "pkg".
            packages += [version_re.split(k)[0] for k in requirements]
    packages += additional_packages

    def _add_index(name, addr, packages=packages):
        """Helper to add a new doc index to the intersphinx catalog

    Parameters:

      name (str): Name of the package that will be added to the catalog
      addr (str): The URL (except the ``objects.inv`` file), that will be added

    """

        if name in packages:
            print("Adding intersphinx source for `%s': %s" % (name, addr))
            mapping[name] = (addr, None)
            # NOTE(review): this rebinds the *local* name only — the
            # outer ``packages`` list is not modified, so this line is
            # effectively dead code (the later loop skips handled
            # packages via the ``p in mapping`` check instead).
            packages = [k for k in packages if k != name]

    def _add_numpy_index():
        """Helper to add the numpy manual"""

        try:
            import numpy
            ver = numpy.version.version
            if smaller_than(ver, '1.5.z'):
                ver = '.'.join(ver.split('.')[:-1]) + '.x'
            else:
                ver = '.'.join(ver.split('.')[:-1]) + '.0'
            _add_index('numpy', 'https://docs.scipy.org/doc/numpy-%s/' % ver)

        except ImportError:
            _add_index('numpy', 'https://docs.scipy.org/doc/numpy/')

    def _add_scipy_index():
        """Helper to add the scipy manual"""

        try:
            import scipy
            ver = scipy.version.version
            if smaller_than(ver, '0.9.0'):
                ver = '.'.join(ver.split('.')[:-1]) + '.x'
            else:
                ver = '.'.join(ver.split('.')[:-1]) + '.0'
            _add_index('scipy',
                       'https://docs.scipy.org/doc/scipy-%s/reference/' % ver)

        except ImportError:
            _add_index('scipy', 'https://docs.scipy.org/doc/scipy/reference/')

    mapping = {}

    # add indexes for common packages used in Bob
    _add_index('python',
               'https://docs.python.org/%d.%d/' % sys.version_info[:2])
    _add_numpy_index()
    _add_scipy_index()
    _add_index('matplotlib', 'http://matplotlib.org/')
    _add_index('setuptools', 'https://setuptools.readthedocs.io/en/latest/')
    _add_index('six', 'https://six.readthedocs.io')
    _add_index('sqlalchemy', 'https://docs.sqlalchemy.org/en/latest/')
    _add_index('docopt', 'http://docopt.readthedocs.io/en/latest/')
    _add_index('scikit-image', 'http://scikit-image.org/docs/dev/')
    _add_index('pillow', 'http://pillow.readthedocs.io/en/latest/')
    _add_index('click', 'http://click.pocoo.org/')

    # get the server for the other packages
    if server is None:
        if "BOB_DOCUMENTATION_SERVER" in os.environ:
            server = os.environ["BOB_DOCUMENTATION_SERVER"]
        else:
            server = "http://www.idiap.ch/software/bob/docs/bob/%(name)s/%(version)s/|http://www.idiap.ch/software/bob/docs/bob/%(name)s/master/"

    # array support for BOB_DOCUMENTATION_SERVER
    # transforms "(file:///path/to/dir  https://example.com/dir| http://bla )"
    # into ["file:///path/to/dir", "https://example.com/dir", "http://bla"]
    # so, trim eventual parenthesis/white-spaces and splits by white space or |
    if server.strip():
        server = re.split(r'[|\s]+', server.strip('() '))
    else:
        server = []

    # check if the packages have documentation on the server
    for p in packages:
        if p in mapping: continue  #do not add twice...

        for s in server:
            # generate URL
            package_name = p.split()[0]
            if s.count('%s') == 1:  #old style
                url = s % package_name
            else:  #use new style, with mapping, try to link against specific version
                try:
                    version = 'v' + pkg_resources.require(
                        package_name)[0].version
                except pkg_resources.DistributionNotFound:
                    version = 'stable'  #package is not a runtime dep, only referenced
                url = s % {'name': package_name, 'version': version}

            try:
                # otherwise, urlopen will fail
                if url.startswith('file://'):
                    f = urllib.urlopen(urllib.Request(url + 'objects.inv'))
                    url = url[7:]  #intersphinx does not like file://
                else:
                    f = urllib.urlopen(urllib.Request(url))

                # request url
                print(
                    "Found documentation for %s on %s; adding intersphinx source"
                    % (p, url))
                mapping[p] = (url, None)
                break  #inner loop, for server, as we found a candidate!

            except HTTPError as exc:
                if exc.code != 404:
                    # url request failed with a something else than 404 Error
                    print("Requesting URL %s returned error: %s" % (url, exc))
                    # notice mapping is not updated here, as the URL does not exist

            except URLError as exc:
                print("Requesting URL %s did not succeed (maybe offline?). " \
                    "The error was: %s" % (url, exc))

            except IOError as exc:
                print("Path %s does not exist. The error was: %s" % (url, exc))

    return mapping
Example #54
0
from .directives import *
from .policy import *
from .thinning import *
from .knowledge_distillation import KnowledgeDistillationPolicy, DistillationLossWeights
from .summary_graph import SummaryGraph, onnx_name_2_pytorch_name
from .early_exit import EarlyExitMgr
import pkg_resources
import logging
logging.captureWarnings(True)

del dict_config
del thinning

# Distiller version: prefer the installed distribution's metadata; fall
# back to "Unknown" when running from a source tree that has not been
# pip-installed.
try:
    __version__ = pkg_resources.require("distiller")[0].version
except pkg_resources.DistributionNotFound:
    __version__ = "Unknown"


def model_find_param_name(model, param_to_find):
    """Look up the name of a model parameter.

    Arguments:
        model: the model to search
        param_to_find: the parameter whose name we want to look up

    Returns:
        The parameter name (string) or None, if the parameter was not found.
    """
    for name, param in model.named_parameters():
Example #55
0
# pyusnvc package

import pkg_resources

from . import usnvc

# Package version string, read from the installed "pyusnvc"
# distribution's metadata.
__version__ = pkg_resources.require("pyusnvc")[0].version


def get_package_metadata():
    """Print each PKG-INFO metadata line of the installed "pyusnvc"
    distribution to stdout."""
    d = pkg_resources.get_distribution('pyusnvc')
    # NOTE(review): _get_metadata is a private pkg_resources API; it
    # silently yields nothing when the metadata file is absent.
    for i in d._get_metadata(d.PKG_INFO):
        print(i)

Example #56
0
#
# Distributed under the GPLv3 License.
# See accompanying file LICENSE.txt or copy at
# http://www.gnu.org/licenses/gpl-3.0.html
#
# Website: https://www.github.com/cokelaer/colormap
# Documentation: http://packages.python.org/colormap
#
##############################################################################
"""main colormap module"""
from __future__ import print_function
from __future__ import division

import pkg_resources
try:
    # Read the version from the installed "colormap" distribution.
    version = pkg_resources.require("colormap")[0].version
except Exception:
    # Not installed (e.g. running from a source checkout): fall back to
    # an empty string.
    version = ''
# BUGFIX: always define __version__.  The original only assigned it
# inside the try block, leaving it undefined whenever the version
# lookup failed.
__version__ = version

from .xfree86 import *

from . import colors
from .colors import *
from .get_cmap import *

# Module-level Colormap instance shared by the aliases below.
c = Colormap()
# All known colormap names: regular maps plus the diverging-black set.
colormap_names = c.colormaps + c.diverging_black
# Convenience aliases to the instance's test_colormap method.
test_colormap = c.test_colormap
test_cmap = c.test_colormap
Example #57
0
def get_version(package='tinypascal'):
    '''Return the version string of the installed *package* distribution.

    Generalized: the package name is now a parameter defaulting to
    'tinypascal', so existing ``get_version()`` calls are unchanged.
    Raises ``pkg_resources.DistributionNotFound`` when the distribution
    is not installed.
    '''
    packages = pkg_resources.require(package)
    return packages[0].version
Example #58
0
# -*- coding: utf-8 -*-
"""This module contains functions called from console script entry points."""

import os
import sys

from os.path import dirname, exists, join

import pkg_resources
pkg_resources.require("TurboGears")

import turbogears
import cherrypy
from certmaster import utils

cherrypy.lowercase_api = True

class ConfigurationError(Exception):
    """Application-specific error raised for configuration problems."""
    pass

#that variable will help us to see when we are in PRODUCTION 
PRODUCTION_ENV = False

def start():
    """Start the CherryPy application server."""
    global PRODUCTION_ENV
    setupdir = dirname(dirname(__file__))
    curdir = os.getcwd()

    # First look on the command line for a desired config file,
    # if it's not on the command line, then look for 'setup.py'
Example #59
0
 def get_package_name(self):
     """Return the installed 'pytify' distribution.

     NOTE(review): despite the name, this returns the whole
     ``pkg_resources`` Distribution object (first match), not just
     its name string — confirm callers rely on that.
     """
     return pkg_resources.require('pytify')[0]
Example #60
0
    USE_CYTHON = True

else:
    USE_CYTHON = False

if USE_CYTHON and not HAVE_CYTHON:
    raise ValueError('''
        Cython could not be found. Please install Cython and try again.
        ''')

# Try bootstrapping setuptools if it doesn't exist. This is for using the
# `develop` command, which is very useful for in-place development work.
try:
    import pkg_resources
    try:
        pkg_resources.require("setuptools>=0.6c5")
    except pkg_resources.VersionConflict:
        from ez_setup import use_setuptools
        use_setuptools(version="0.6c5")
    from setuptools import setup, Command
except ImportError:
    sys.exit('pybedtools uses setuptools '
             '(https://packaging.python.org/installing/) '
             'for installation but setuptools was not found')

curdir = os.path.abspath(os.path.dirname(__file__))

# These imports need to be here; setuptools needs to be imported first.
from distutils.extension import Extension  # noqa: E402
from distutils.command.build import build  # noqa: E402
from distutils.command.build_ext import build_ext  # noqa: E402