def version():
    """Return a two-line string with the installed Yaybu and yay versions."""
    import pkg_resources
    versions = [
        pkg_resources.get_distribution(dist).version
        for dist in ('Yaybu', 'Yay')
    ]
    return 'Yaybu %s\nyay %s' % tuple(versions)
def get_dist_dependencies(name, recurse=True):
    """
    Get the dependencies of the given, already installed distribution.

    @param name Name of an installed distribution.
    @param recurse If True, recursively find all dependencies.
    @returns A set of (converted) package names; the set always contains
        the top-level package itself.  (Fixed docstring: a set has no
        "first entry".)
    """
    reqs = set()
    # Seed with the Distribution object; get_distribution accepts both
    # Distribution objects and name strings on later passes.
    working = set([pkg_resources.get_distribution(name)])
    depth = 0
    while working:
        deps = set()
        for distname in working:
            dist = pkg_resources.get_distribution(distname)
            reqs.add(convert_name(dist.project_name))
            for req in dist.requires():
                reqs_ = convert_requirement(req)
                # Strip version suffixes ("pkg-1.2" -> "pkg") and drop
                # negated ("!...") entries.
                deps |= set(x.split('-', 1)[0] for x in reqs_
                            if not x.startswith('!'))
        working = deps - reqs
        depth += 1
        # depth >= 2 means both the top-level package and its direct
        # dependencies have been processed — enough when not recursing.
        if (not recurse) and (depth >= 2):
            break
    return reqs
def create_build_env():
    """Create and activate a virtualenv, then install the docs toolchain.

    Upgrades pip/distribute where the preinstalled versions are known to
    break sphinx installation, then installs pinned Sphinx and Breathe.
    """
    # Create virtualenv.
    virtualenv_dir = 'virtualenv'
    check_call(['virtualenv', virtualenv_dir])
    import sysconfig
    # 'scripts' is "bin" on POSIX and "Scripts" on Windows.
    scripts_dir = os.path.basename(sysconfig.get_path('scripts'))
    activate_this_file = os.path.join(virtualenv_dir, scripts_dir,
                                      'activate_this.py')
    # Activate the virtualenv inside the *current* interpreter.
    with open(activate_this_file) as f:
        exec(f.read(), dict(__file__=activate_this_file))
    # Upgrade pip because installation of sphinx with pip 1.1 available on Travis
    # is broken (see #207) and it doesn't support the show command.
    from pkg_resources import get_distribution, DistributionNotFound
    pip_version = get_distribution('pip').version
    if LooseVersion(pip_version) < LooseVersion('1.5.4'):
        print("Updating pip")
        check_call(['pip', 'install', '--upgrade', 'pip'])
    # Upgrade distribute because installation of sphinx with distribute 0.6.24
    # available on Travis is broken (see #207).
    try:
        distribute_version = get_distribution('distribute').version
        if LooseVersion(distribute_version) <= LooseVersion('0.6.24'):
            print("Updating distribute")
            check_call(['pip', 'install', '--upgrade', 'distribute'])
    except DistributionNotFound:
        # distribute not installed at all — nothing to upgrade.
        pass
    # Install Sphinx and Breathe.
    pip_install('fmtlib/sphinx',
                '12dde8afdb0a7bb5576e2656692c3478c69d8cc3',
                check_version='1.4a0.dev-20151013')
    pip_install('michaeljones/breathe',
                '1c9d7f80378a92cffa755084823a78bb38ee4acc')
def print_log(self):
    """Record this run's inputs in inputs.yml; on a restart, compare the
    current inputs against the logged ones and abort on any mismatch
    (a pVAC-Seq version change only warns)."""
    log_file = os.path.join(self.log_dir(), 'inputs.yml')
    if os.path.exists(log_file):
        with open(log_file, 'r') as log_fh:
            # NOTE(review): yaml.load without an explicit Loader is
            # deprecated/unsafe on untrusted input; kept because the log
            # may contain python-tagged objects written by yaml.dump below.
            past_inputs = yaml.load(log_fh)
            current_inputs = self.__dict__
            current_inputs['pvacseq_version'] = pkg_resources.get_distribution("pvacseq").version
            # A version change is reported but does not abort the restart.
            if past_inputs['pvacseq_version'] != current_inputs['pvacseq_version']:
                status_message(
                    "Restart to be executed with a different pVAC-Seq version:\n" +
                    "Past version: %s\n" % past_inputs['pvacseq_version'] +
                    "Current version: %s" % current_inputs['pvacseq_version']
                )
            for key in current_inputs.keys():
                if key == 'pvacseq_version':
                    continue
                if key not in past_inputs.keys():
                    # Bug fix: a key absent from past_inputs whose current
                    # value was None previously fell through to the
                    # comparison branch below and raised KeyError.
                    if current_inputs[key] is not None:
                        sys.exit(
                            "Restart inputs are different from past inputs: \n" +
                            "Additional input: %s - %s\n" % (key, current_inputs[key]) +
                            "Aborting."
                        )
                elif current_inputs[key] != past_inputs[key]:
                    sys.exit(
                        "Restart inputs are different from past inputs: \n" +
                        "Past input: %s - %s\n" % (key, past_inputs[key]) +
                        "Current input: %s - %s\n" % (key, current_inputs[key]) +
                        "Aborting."
                    )
    else:
        # First run: dump all inputs plus the running version.
        with open(log_file, 'w') as log_fh:
            inputs = self.__dict__
            inputs['pvacseq_version'] = pkg_resources.get_distribution("pvacseq").version
            yaml.dump(inputs, log_fh, default_flow_style=False)
def mock_tool(self, mock, name):
    """Register a mock tool that will be returned when getToolByName()
    is called.
    """
    self._check_super_setup()
    # Lazily set up the getToolByName replacements the first time a tool
    # is mocked.
    if self._getToolByName_replacements is None:
        self._getToolByName_replacements = []
        self._getToolByName_replacements.append(self.mocker.replace(
            'Products.CMFCore.utils.getToolByName'))
        import pkg_resources
        try:
            pkg_resources.get_distribution(
                'Products.PloneHotfix20121106')
        except pkg_resources.DistributionNotFound:
            # Hotfix not installed; only the CMFCore entry point matters.
            pass
        else:
            # The hotfix wraps getToolByName; patch its copy as well.
            self._getToolByName_replacements.append(self.mocker.replace(
                'Products.PloneHotfix20121106.gtbn.gtbn'))
    # patch: do not count.
    for replacement in self._getToolByName_replacements:
        self.expect(replacement(ANY, name)).result(
            mock).count(0, None)
def _runscript(scriptname, sandbox=False):
    """
    Find & run a script with exec (i.e. not via os.system or subprocess).

    Returns 0 on success, -1 when the script cannot be found; raises
    nose.SkipTest for a missing sandbox script outside a repository.
    """
    import pkg_resources
    ns = {"__name__": "__main__"}
    ns['sys'] = globals()['sys']
    try:
        # Prefer the script shipped with an installed khmer distribution.
        pkg_resources.get_distribution("khmer").run_script(
            scriptname, ns)
        return 0
    except pkg_resources.ResolutionError:
        # Fixed: dropped the unused `as err` binding and the duplicated
        # os.path.isfile() check.
        if sandbox:
            path = os.path.join(os.path.dirname(__file__), "../sandbox")
        else:
            path = scriptpath()
        scriptfile = os.path.join(path, scriptname)
        if os.path.isfile(scriptfile):
            exec(compile(open(scriptfile).read(), scriptfile, 'exec'),
                 ns)
            return 0
        elif sandbox:
            raise nose.SkipTest("sandbox tests are only run in a repository.")
    return -1
def _get_dist(self, moddef=None):
    """Return the pkg_resources distribution for a NetProfile module.

    Without ``moddef`` the core 'netprofile' distribution is returned,
    otherwise the 'netprofile_<moddef>' distribution.
    """
    dist_name = 'netprofile' if moddef is None else 'netprofile_' + moddef
    return pkg_resources.get_distribution(dist_name)
def get_distribution(self, req):
    """
    This gets a distribution object, and installs the distribution
    if required.
    """
    try:
        dist = pkg_resources.get_distribution(req)
        if self.verbose:
            print('Distribution already installed:')
            print(' ', dist, 'from', dist.location)
        return dist
    except pkg_resources.DistributionNotFound:
        if self.options.no_install:
            print("Because --no-install was given, we won't try to install the package %s" % req)
            raise
        # -v: verbose; -m: multi-version (don't write easy-install.pth).
        options = ['-v', '-m']
        for op in self.options.easy_install_op or []:
            # Allow users to pass options without the leading dashes.
            if not op.startswith('-'):
                op = '--'+op
            options.append(op)
        for op in self.options.easy_install_find_links or []:
            options.append('--find-links=%s' % op)
        if self.simulate:
            raise BadCommand(
                "Must install %s, but in simulation mode" % req)
        print("Must install %s" % req)
        from setuptools.command import easy_install
        from setuptools import setup
        # Drive easy_install in-process via setup().
        setup(script_args=['-q', 'easy_install'] + options + [req])
        # Now that it is installed, the lookup should succeed.
        return pkg_resources.get_distribution(req)
def init_app(self, app, minters_entry_point_group=None,
             fetchers_entry_point_group=None):
    """Flask application initialization."""
    # Register the pidstore CLI group.
    app.cli.add_command(cmd)

    # Forward the app's log handlers to the module logger when enabled.
    app.config.setdefault('PIDSTORE_APP_LOGGER_HANDLERS', app.debug)
    if app.config['PIDSTORE_APP_LOGGER_HANDLERS']:
        for app_handler in app.logger.handlers:
            logger.addHandler(app_handler)

    # Admin object link endpoints are only useful with invenio-records.
    try:
        pkg_resources.get_distribution('invenio-records')
        default_endpoints = dict(rec='recordmetadata.details_view')
    except pkg_resources.DistributionNotFound:
        default_endpoints = {}
    app.config.setdefault('PIDSTORE_OBJECT_ENDPOINTS', default_endpoints)

    # Register template filter.
    app.jinja_env.filters['pid_exists'] = pid_exists

    # Initialize and attach the extension state.
    state = _PIDStoreState(
        app=app,
        minters_entry_point_group=minters_entry_point_group,
        fetchers_entry_point_group=fetchers_entry_point_group,
    )
    app.extensions['invenio-pidstore'] = state
    return state
def check_dependencies(path):
    """
    This methods returns a pair of (message, packages missing).
    Or None if everything is OK.
    """
    log.debug("check dependencies of %s" % path)
    # noinspection PyBroadException
    try:
        from pkg_resources import get_distribution
        req_path = path + os.sep + 'requirements.txt'
        if not os.path.isfile(req_path):
            log.debug('%s has no requirements.txt file' % path)
            return None
        missing_pkg = []
        with open(req_path) as f:
            for line in f:
                stripped = line.strip()
                # Bug fix: skip blank lines — get_distribution('') always
                # fails, so any trailing newline marked the plugin broken.
                # (Mirrors the newer check_dependencies variant in this
                # codebase.)
                if not stripped:
                    continue
                # noinspection PyBroadException
                try:
                    get_distribution(stripped)
                except Exception:
                    missing_pkg.append(stripped)
        if missing_pkg:
            return (('You need those dependencies for %s: ' % path) +
                    ','.join(missing_pkg), missing_pkg)
        return None
    except Exception:
        # setuptools (pkg_resources) itself is unavailable.
        return ('You need to have setuptools installed for the dependency check of the plugins', [])
def _runscript(scriptname, sandbox=False):
    """Find & run a script with exec (i.e. not via os.system or subprocess).

    Returns 0 on success, -1 when the script cannot be found; skips the
    test for a missing sandbox script outside a repository.
    """
    namespace = {"__name__": "__main__"}
    namespace['sys'] = globals()['sys']
    try:
        # Prefer the script shipped with an installed khmer distribution.
        pkg_resources.get_distribution("khmer").run_script(
            scriptname, namespace)
        return 0
    except pkg_resources.ResolutionError:
        if sandbox:
            path = os.path.join(os.path.dirname(__file__), "../sandbox")
        else:
            path = scriptpath()
        scriptfile = os.path.join(path, scriptname)
        # Fixed: removed the duplicated os.path.isfile() check.
        if os.path.isfile(scriptfile):
            exec(  # pylint: disable=exec-used
                compile(open(scriptfile).read(), scriptfile, 'exec'),
                namespace)
            return 0
        elif sandbox:
            pytest.skip("sandbox tests are only run in a repository.")
    return -1
def nupicBindingsPrereleaseInstalled():
    """
    Make an attempt to determine if a pre-release version of nupic.bindings is
    installed already.

    @return: boolean
    """
    try:
        nupicDistribution = pkg_resources.get_distribution("nupic.bindings")
        if pkg_resources.parse_version(nupicDistribution.version).is_prerelease:
            # A pre-release dev version of nupic.bindings is installed.
            return True
    except pkg_resources.DistributionNotFound:
        # Silently ignore. The absence of nupic.bindings will be handled by
        # setuptools by default
        pass

    # Also check for nupic.research.bindings.  NOTE: any installed version
    # counts here, not just pre-releases (fixed: dropped the unused
    # distribution binding).
    try:
        pkg_resources.get_distribution("nupic.research.bindings")
        return True
    except pkg_resources.DistributionNotFound:
        # Silently ignore, as above.
        pass

    return False
def version(self, request, **kwargs):
    """Report the installed lisa-server version, the latest release on
    PyPI, and whether an upgrade is available."""
    from tastypie.http import HttpAccepted
    from pkg_resources import get_distribution, parse_version
    import requests

    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)
    self.log_throttled_access(request)

    local_version = get_distribution('lisa-server').version
    should_upgrade = False
    r = requests.get('https://pypi.python.org/pypi/lisa-server/json')
    if r.status_code == requests.codes.ok:
        remote_version = r.json()['info']['version']
    else:
        return self.create_response(request,
                                    {'status': 'fail',
                                     'log': 'Problem contacting pypi.python.org'},
                                    HttpAccepted)
    # Bug fix: compare *parsed* versions.  Raw string comparison is
    # lexicographic, so '9.0' > '10.0' and upgrades were mis-detected.
    if parse_version(remote_version) > parse_version(local_version):
        should_upgrade = True
    response = {
        'local_version': local_version,
        'remote_version': remote_version,
        'should_upgrade': should_upgrade,
    }
    return self.create_response(request, response, HttpAccepted)
def get_distribution(self, req):
    """
    This gets a distribution object, and installs the distribution
    if required.
    """
    # NOTE: Python 2 syntax (print statements); this mirrors the Python 3
    # variant of the same helper elsewhere in this codebase.
    try:
        dist = pkg_resources.get_distribution(req)
        if self.verbose:
            print "Distribution already installed:"
            print " ", dist, "from", dist.location
        return dist
    except pkg_resources.DistributionNotFound:
        if self.options.no_install:
            print "Because --no-install was given, we won't try to install the package %s" % req
            raise
        # -v: verbose; -m: multi-version (don't write easy-install.pth).
        options = ["-v", "-m"]
        for op in self.options.easy_install_op or []:
            # Allow users to pass options without the leading dashes.
            if not op.startswith("-"):
                op = "--" + op
            options.append(op)
        for op in self.options.easy_install_find_links or []:
            options.append("--find-links=%s" % op)
        if self.simulate:
            raise BadCommand("Must install %s, but in simulation mode" % req)
        print "Must install %s" % req
        from setuptools.command import easy_install
        from setuptools import setup
        # Drive easy_install in-process via setup().
        setup(script_args=["-q", "easy_install"] + options + [req])
        # Now that it is installed, the lookup should succeed.
        return pkg_resources.get_distribution(req)
def version_from_frame(frame):
    """
    Given a ``frame``, obtain the version number of the module running there.
    """
    module = getmodule(frame)
    if module is None:
        # Frame doesn't map to an importable module (e.g. exec'd code);
        # report file/line instead of a version.
        s = "<unknown from {0}:{1}>"
        return s.format(frame.f_code.co_filename, frame.f_lineno)
    module_name = module.__name__
    # Try the full dotted name first; on failure fall back to the part
    # before the first "." (i.e. the top-level package) and retry.
    while True:
        try:
            get_distribution(module_name)
        except DistributionNotFound:
            # Look at what's to the left of "."
            module_name, dot, _ = module_name.partition(".")
            if dot == "":
                # There is no dot, nothing more we can do.
                break
        else:
            return getversion(module_name)
    # No installed distribution matched any prefix of the module name.
    return None
def check_dependencies(req_path: str) -> Tuple[str, Sequence[str]]:
    """
    This methods returns a pair of (message, packages missing).
    Or None, [] if everything is OK.
    """
    log.debug("check dependencies of %s" % req_path)
    # noinspection PyBroadException
    try:
        from pkg_resources import get_distribution
        missing_pkg = []
        if not os.path.isfile(req_path):
            log.debug('%s has no requirements.txt file' % req_path)
            return None, missing_pkg
        with open(req_path) as f:
            for line in f:
                stripped = line.strip()
                # Skip empty lines and comments — bug fix: '#' comment
                # lines in requirements.txt were previously passed to
                # get_distribution and reported as missing packages.
                if not stripped or stripped.startswith('#'):
                    continue
                # noinspection PyBroadException
                try:
                    get_distribution(stripped)
                except Exception:
                    missing_pkg.append(stripped)
        if missing_pkg:
            return (('You need these dependencies for %s: ' % req_path) +
                    ','.join(missing_pkg), missing_pkg)
        return None, missing_pkg
    except Exception:
        # setuptools (pkg_resources) itself is unavailable.
        log.exception('Problem checking for dependencies.')
        return 'You need to have setuptools installed for the dependency check of the plugins', []
def has_package(name):
    """Return True when a distribution called *name* is installed."""
    try:
        pkg_resources.get_distribution(name)
        return True
    except pkg_resources.DistributionNotFound:
        return False
def inject_variable():
    """ Inject some global variables into all templates """
    extras = dict(
        models_version=get_distribution('datanommer.models').version,
        grepper_version=get_distribution('datagrepper').version,
    )
    # Style defaults; each may be overridden from fedmsg configuration.
    style_defaults = (
        ('message_bus_link', 'http://fedmsg.com'),
        ('message_bus_shortname', 'fedmsg'),
        ('message_bus_longname', 'fedmsg bus'),
        ('theme_css_url', 'https://apps.fedoraproject.org/global/fedora-bootstrap-1.0/fedora-bootstrap.min.css'),
        ('datagrepper_logo', 'static/datagrepper.png'),
    )
    for key, fallback in style_defaults:
        extras[key] = fedmsg_config.get(key, fallback)
    # fedmenu requires both the script URL and its data URL.
    if 'fedmenu_url' in fedmsg_config:
        extras['fedmenu_url'] = fedmsg_config['fedmenu_url']
        extras['fedmenu_data_url'] = fedmsg_config['fedmenu_data_url']
    if 'websocket_address' in fedmsg_config:
        extras['websocket_address'] = fedmsg_config['websocket_address']
    # Only allow websockets connections to fedoraproject.org, for instance
    if 'content_security_policy' in fedmsg_config:
        extras['content_security_policy'] = fedmsg_config['content_security_policy']
    return extras
def create_plugin(options): """create a plugin skeleton to start a new project""" # this is actually needed thanks to the skeleton using jinja2 (and six, although that's changeable) try: pkg_resources.get_distribution("sideboard") except pkg_resources.DistributionNotFound: raise BuildFailure("This command must be run from within a configured virtual environment.") plugin_name = options.create_plugin.name if getattr(options.create_plugin, 'drop', False) and (PLUGINS_DIR / path(plugin_name.replace('_', '-'))).exists(): # rmtree fails if the dir doesn't exist apparently (PLUGINS_DIR / path(plugin_name.replace('_', '-'))).rmtree() kwargs = {} for opt in ['webapp', 'sqlalchemy', 'service']: kwargs[opt] = not getattr(options.create_plugin, 'no_' + opt, False) kwargs['cli'] = getattr(options.create_plugin, 'cli', False) if kwargs['cli']: kwargs['webapp'] = False kwargs['service'] = False from data.paver import skeleton skeleton.create_plugin(PLUGINS_DIR, plugin_name, **kwargs) print('{} successfully created'.format(options.create_plugin.name))
def setUpZope(self, app, configurationContext):
    """Install the Zope products this test layer depends on."""
    # prepare installing Products.ATContentTypes
    import Products.ATContentTypes
    self.loadZCML(package=Products.ATContentTypes)
    z2.installProduct(app, 'Products.Archetypes')
    z2.installProduct(app, 'Products.ATContentTypes')
    z2.installProduct(app, 'plone.app.blob')
    # prepare installing plone.app.collection
    try:
        pkg_resources.get_distribution('plone.app.collection')
        z2.installProduct(app, 'plone.app.collection')
    except pkg_resources.DistributionNotFound:
        # Optional dependency; skip when not available.
        pass
    # prepare installing plone.app.contenttypes
    z2.installProduct(app, 'Products.DateRecurringIndex')
    import plone.app.contenttypes
    xmlconfig.file(
        'configure.zcml',
        plone.app.contenttypes,
        context=configurationContext
    )
def Wrapper(*args, **kwargs):
    # Decorator body: skip the wrapped test when `package_name` is not
    # installed; `package_name` and `test_function` come from the
    # enclosing decorator's scope.
    try:
        pkg_resources.get_distribution(package_name)
    except pkg_resources.DistributionNotFound:
        raise unittest.SkipTest("Skipping, package %s not installed" %
                                package_name)
    return test_function(*args, **kwargs)
def _runscript(scriptname, sandbox=False): import pkg_resources ns = {"__name__": "__main__"} ns['sys'] = globals()['sys'] try: pkg_resources.get_distribution("khmer").run_script( scriptname, ns) return 0 except pkg_resources.ResolutionError as err: if sandbox: paths = [os.path.join(os.path.dirname(__file__), "../sandbox")] else: paths = [os.path.join(os.path.dirname(__file__), "../scripts")] paths.extend(os.environ['PATH'].split(':')) for path in paths: scriptfile = os.path.join(path, scriptname) if os.path.isfile(scriptfile): exec(compile(open(scriptfile).read(), scriptfile, 'exec'), ns) return 0 if sandbox: raise nose.SkipTest("sandbox tests are only run in a repository.") return -1
def resolve(cls, distname, version, source=None):
    """
    Check a dependent package

    Parameters:
    - distname: package name
    - version: requirement suffix (e.g. ">=1.0"); may be None/empty
    - source: origin of the requirement, recorded for reporting
    Return: True if the dependency resolved
    """
    # NOTE: Python 2 syntax (print statements).
    try:
        version = version or ''
        # Raises VersionConflict / DistributionNotFound on failure.
        get_distribution(distname + version)
        cls.dependencies[distname] = (version, source)
    except VersionConflict:
        version = version or 'any'
        print "%s: %s(%s)" % (WARNING_VERSION_CONFLICT,
                              colored(distname, COLOR_INFO),
                              colored(version, COLOR_INFO))
        cls.terminate = True
        cls.unresolved[distname] = (version, source)
        return False
    except DistributionNotFound:
        version = version or 'any'
        print "%s: %s(%s)" % (WARNING_NOT_FOUND,
                              colored(distname, COLOR_INFO),
                              colored(version, COLOR_INFO))
        cls.terminate = True
        cls.unresolved[distname] = (version, source)
        return False
    return True
def log_extra(context):
    """Collect request and version metadata for log records.

    Every field falls back to None when the information is unavailable.
    """
    def _dist_version(dist_name):
        # Version of an installed distribution, or None when absent.
        try:
            return pkg_resources.get_distribution(dist_name).version
        except pkg_resources.DistributionNotFound:
            return None

    try:
        url = context.request.url
    except AttributeError:
        url = None

    try:
        request_id = context.request_handler.request.headers.get(
            'Thumbor-Request-Id', 'None')
    except AttributeError:
        request_id = None

    return {
        'url': url,
        'thumbor-request-id': request_id,
        'thumbor-version': _dist_version('thumbor'),
        'wikimedia-thumbor-version': _dist_version('wikimedia_thumbor'),
    }
def _runscript(scriptname, sandbox=False):
    """Find & run a script with exec (i.e. not via os.system or subprocess)."""
    namespace = {"__name__": "__main__"}
    namespace['sys'] = globals()['sys']
    try:
        # Prefer the script shipped with an installed khmer distribution.
        pkg_resources.get_distribution("khmer").run_script(
            scriptname, namespace)
        return 0
    except pkg_resources.ResolutionError:
        # Fall through to running the script from the source tree.
        pass
    if sandbox:
        path = os.path.join(os.path.dirname(__file__), "../sandbox")
    else:
        path = scriptpath()
    scriptfile = os.path.join(path, scriptname)
    if os.path.isfile(scriptfile):
        # NOTE(review): this isfile check is duplicated, which makes the
        # RuntimeError below effectively unreachable (a missing file falls
        # through to `return -1` instead) — confirm intended behavior.
        if os.path.isfile(scriptfile):
            exec(compile(open(scriptfile).read(), scriptfile, 'exec'),
                 namespace)
            return 0
        else:
            raise RuntimeError("Tried to execute {} but it is"
                               " not a file.".format(scriptfile))
    return -1
def get_tg_version():
    """Return the installed TurboGears version string."""
    dist = pkg_resources.get_distribution('TurboGears')
    try:
        return dist._version
    except AttributeError:
        # setuptools 0.6c5 exposes .version instead of ._version
        return dist.version
def check_and_install_dependencies(dry_run=False):
    """Check (and optionally install/upgrade) the py27/win32 dependencies.

    Relies on module-level `dependencies` (name -> installer callable),
    `needed_module` (name -> importable module name) and `version_numbers`
    (name -> minimum version string).
    """
    # NOTE: Python 2 / Windows-only code (print statements, WindowsError).
    print "Testing for Python package dependencies (warning: works ONLY for python27/win32)"
    import pkg_resources
    for dep in dependencies.keys():
        try:
            # Import check; raises ImportError when the module is absent.
            exec('import ' + needed_module[dep])
            if LooseVersion(pkg_resources.get_distribution(dep).version) == LooseVersion(version_numbers[dep]):
                print('[OK] ' + dep)
            elif LooseVersion(pkg_resources.get_distribution(dep).version) > LooseVersion(version_numbers[dep]):
                print('[OK] ' + dep + ', newer than required version installed (' + str(LooseVersion(pkg_resources.get_distribution(dep).version)) + ')')
            else:
                # Installed but too old: try to update unless dry_run.
                try:
                    if not dry_run:
                        dependencies[dep]()
                        print('[OK] ' + dep + ', but needed update to version >= ' + (version_numbers[dep]))
                    else:
                        print('[MAYBE OK] ' + dep + ' needs update. Set dry_run=False for trying an update.')
                except WindowsError, e:
                    print('[Fail] ' + dep + " " + str(e))
        except ImportError:
            # Missing entirely: attempt an install via its installer.
            try:
                ans = dependencies[dep]()
                if ans != 0:
                    raise WindowsError('does not exist')
                print '[OK] ' + dep + ' was missing but successfully installed'
            except WindowsError, e:
                print('[FAIL] ' + dep + " " + str(e))
        except Exception, e:
            print('[FAIL] ' + dep + " A mysterious error thrown when importing", e)
def setUpZope(self, app, configurationContext):
    """Load ZCML and install the products this test layer requires."""
    import Products.CMFPlacefulWorkflow
    xmlconfig.file('configure.zcml', Products.CMFPlacefulWorkflow,
                   context=configurationContext)
    import ftw.upgrade
    xmlconfig.file('configure.zcml', ftw.upgrade,
                   context=configurationContext)
    z2.installProduct(app, 'Products.DateRecurringIndex')
    import plone.app.contenttypes
    xmlconfig.file('configure.zcml', plone.app.contenttypes,
                   context=configurationContext)
    z2.installProduct(app, 'Products.CMFPlacefulWorkflow')
    try:
        # Plone 4 with collective.indexing
        pkg_resources.get_distribution('collective.indexing')
    except pkg_resources.DistributionNotFound:
        # Optional dependency; skip when not available.
        pass
    else:
        import collective.indexing
        xmlconfig.file('configure.zcml', collective.indexing,
                       context=configurationContext)
        z2.installProduct(app, 'collective.indexing')
    manage_addVirtualHostMonster(app, 'virtual_hosting')
def __init__(self, recipients, subject, template=None, values=None,
             msg="", sender=None):
    """Build a mail from either a Mako template plus values or a raw msg.

    :param recipients: list of recipient addresses
    :param subject: mail subject
    :param template: basename of a "<template>.mako" file looked up in
        the application's and ringo's templates/mails directories
    :param values: dict of values used to render the template
    :param msg: plain message body, used when no template is given
    :param sender: optional sender address
    :raises Exception: when neither template+values nor msg is provided
    """
    # Bug fix: `values={}` was a mutable default argument; use None as
    # the sentinel instead (backward compatible).
    if values is None:
        values = {}
    self.recipients = recipients
    self.subject = u"%s" % subject
    self.sender = sender
    self.body = ""
    # Application templates take precedence over ringo's bundled ones.
    app_base_dir = pkg_resources.get_distribution(get_app_name()).location
    ringo_base_dir = pkg_resources.get_distribution("ringo").location
    template_dir = os.path.join(app_base_dir, get_app_name(),
                                'templates/mails')
    ringo_template_dir = os.path.join(ringo_base_dir, "ringo",
                                      'templates/mails')
    self.tl = TemplateLookup(directories=[template_dir, ringo_template_dir],
                             default_filters=['h'])
    if template:
        self.template = self.tl.get_template("%s.mako" % template)
        self.body = self.template.render(**values)
    elif msg:
        self.body = msg
    else:
        raise Exception("Mail is missing either a"
                        " template and values or a msg")
    log.debug(self.body)
def _importers():
    # import things we need
    # but make this done on a first use basis
    # Probes optional Google API dependencies once and records the results
    # in module-level _*_INSTALLED / _*_VALID_VERSION flags.
    global _IMPORTS
    if _IMPORTS:
        # Already probed; flags are set.
        return
    _IMPORTS = True
    if not compat.PY3:
        global _GOOGLE_API_CLIENT_INSTALLED, _GOOGLE_API_CLIENT_VALID_VERSION, \
            _GOOGLE_FLAGS_INSTALLED, _GOOGLE_FLAGS_VALID_VERSION, \
            _HTTPLIB2_INSTALLED, _SETUPTOOLS_INSTALLED
        # NOTE(review): _GOOGLE_API_CLIENT_VERSION and _GOOGLE_FLAGS_VERSION
        # are assigned below without a `global` declaration — confirm they
        # are meant to be locals.
        try:
            import pkg_resources
            _SETUPTOOLS_INSTALLED = True
        except ImportError:
            _SETUPTOOLS_INSTALLED = False
        if _SETUPTOOLS_INSTALLED:
            try:
                from apiclient.discovery import build
                from apiclient.http import MediaFileUpload
                from apiclient.errors import HttpError
                from oauth2client.client import OAuth2WebServerFlow
                from oauth2client.client import AccessTokenRefreshError
                from oauth2client.client import flow_from_clientsecrets
                from oauth2client.file import Storage
                from oauth2client.tools import run
                _GOOGLE_API_CLIENT_INSTALLED=True
                _GOOGLE_API_CLIENT_VERSION = pkg_resources.get_distribution('google-api-python-client').version
                if LooseVersion(_GOOGLE_API_CLIENT_VERSION) >= '1.2.0':
                    _GOOGLE_API_CLIENT_VALID_VERSION = True
            except ImportError:
                _GOOGLE_API_CLIENT_INSTALLED = False
            try:
                import gflags as flags
                _GOOGLE_FLAGS_INSTALLED = True
                _GOOGLE_FLAGS_VERSION = pkg_resources.get_distribution('python-gflags').version
                if LooseVersion(_GOOGLE_FLAGS_VERSION) >= '2.0':
                    _GOOGLE_FLAGS_VALID_VERSION = True
            except ImportError:
                _GOOGLE_FLAGS_INSTALLED = False
            try:
                import httplib2
                _HTTPLIB2_INSTALLED = True
            except ImportError:
                _HTTPLIB2_INSTALLED = False
# -*- coding: utf-8 -*-
import io
import os

from pkg_resources import DistributionNotFound, get_distribution

"""Top-level package for bring."""

__author__ = """Markus Binsteiner"""
__email__ = "*****@*****.**"

# Bug fix: initialize __version__ up front.  Previously, if the inner
# fallback below hit its `except` branch, __version__ was never bound and
# the final `if __version__ is None` check raised NameError.
__version__ = None
try:
    # Change here if project is renamed and does not equal the package name
    dist_name = __name__
    __version__ = get_distribution(dist_name).version
except DistributionNotFound:
    # Not installed as a distribution; fall back to a bundled version.txt.
    try:
        version_file = os.path.join(os.path.dirname(__file__), "version.txt")
        if os.path.exists(version_file):
            with io.open(version_file, encoding="utf-8") as vf:
                __version__ = vf.read()
        else:
            __version__ = "unknown"
    except (Exception):
        # Leave __version__ as None; normalized to "unknown" below.
        pass

if __version__ is None:
    __version__ = "unknown"
from .tds_base import (Error, LoginError, DatabaseError, ProgrammingError,
                       IntegrityError, DataError, InternalError,
                       InterfaceError, TimeoutError, OperationalError,
                       NotSupportedError, Warning, ClosedConnectionError,
                       Column, PreLoginEnc)
from .tds_types import (TableValuedParam, Binary)
from .tds_base import (ROWID, DECIMAL, STRING, BINARY, NUMBER, DATETIME,
                       INTEGER, REAL, XML)
from . import tls
import pkg_resources

__author__ = 'Mikhail Denisenko <*****@*****.**>'
__version__ = pkg_resources.get_distribution('python-tds').version

# NOTE(review): `logging` is referenced here but not imported in this
# chunk — presumably imported above; confirm.
logger = logging.getLogger(__name__)


def _ver_to_int(ver):
    """Pack a dotted version string into an int: (major << 24) + (minor << 16).

    Returns 0 (after logging a warning) when the string has fewer than
    two dot-separated parts.
    """
    res = ver.split('.')
    if len(res) < 2:
        logger.warning(
            'Invalid version {}, it should have 2 parts at least separated by "."'
            .format(ver))
        return 0
    # Bug fix: the previous `maj, minor, _ = ver.split('.')` raised
    # ValueError for versions with exactly two (e.g. "1.2") or more than
    # three (e.g. "1.2.3.4") components, despite the guard above.
    return (int(res[0]) << 24) + (int(res[1]) << 16)
# Wrap the response in an operation future. response = operation_async.from_gapic( response, self._client._transport.operations_client, empty_pb2.Empty, metadata_type=gca_operation.DeleteOperationMetadata, ) # Done; return the response. return response async def __aenter__(self): return self async def __aexit__(self, exc_type, exc, tb): await self.transport.close() try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-aiplatform", ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("IndexServiceAsyncClient",)
def bootstrap() -> Iterable[Tuple[str,Any]]:
    '''
    Yields a stream of log information.

    Events yielded as (kind, payload) tuples: 'log', 'needs_install',
    'install_done', 'cannot_update'.
    '''
    global DID_BOOTSTRAP
    if DID_BOOTSTRAP:
        # Only run once per process.
        return
    DID_BOOTSTRAP = True

    # Add custom folder to search path.
    for path in site.getsitepackages(prefixes=[INSTALL_PREFIX]):
        if not path.startswith(INSTALL_PREFIX):
            # On macOS, some global paths are added as well which we don't want.
            continue
        yield ('log', 'Added {} as module search path'.format(path))
        # Make sure directory exists as it may otherwise be ignored later on when we need it.
        # This is because Python seems to cache whether module search paths do not exist to avoid
        # redundant lookups.
        os.makedirs(path, exist_ok=True)
        site.addsitedir(path)
        # pkg_resources doesn't listen to changes on sys.path.
        pkg_resources.working_set.add_entry(path)

    # pip tries to install packages even if they are installed already in the
    # custom folder. To avoid that, we do the check ourselves.
    # However, if any package is missing, we re-install all packages.
    # See the comment below on why this is necessary.
    installed = []
    needs_install = []
    cannot_update = []
    for dep in DEPS:
        try:
            # Will raise DistributionNotFound if not found.
            location = pkg_resources.get_distribution(dep.name).location
            is_local = Path(INSTALL_PREFIX) in Path(location).parents
            if not dep.min:
                installed.append((dep, is_local))
            else:
                # There is a minimum version constraint, check that.
                try:
                    # Will raise VersionConflict on version mismatch.
                    pkg_resources.get_distribution('{}>={}'.format(dep.name, dep.min))
                    installed.append((dep, is_local))
                except pkg_resources.VersionConflict as exc:
                    # Re-install is only possible if the previous version was installed by us.
                    if is_local:
                        needs_install.append(dep)
                    else:
                        # Continue without re-installing this package and hope for the best.
                        # cannot_update is populated which can later be used to notify the user
                        # that a newer version is required and has to be manually updated.
                        cannot_update.append((dep, exc.dist.version))
                        installed.append((dep, False))
        except pkg_resources.DistributionNotFound as exc:
            needs_install.append(dep)

    if needs_install:
        yield ('needs_install', needs_install)
        yield ('log', 'Package directory: ' + INSTALL_PREFIX)
        # Remove everything as we can't upgrade packages when using --prefix
        # which may lead to multiple pkg-0.20.3.dist-info folders for different versions
        # and that would lead to false positives with pkg_resources.get_distribution().
        if os.path.exists(INSTALL_PREFIX):
            # Some randomness for the temp folder name, in case an old one is still lying around for some reason.
            rnd = random.randint(10000, 99999)
            tmp_dir = INSTALL_PREFIX + '_tmp_{}'.format(rnd)
            # On Windows, rename + delete allows to re-create the folder immediately,
            # otherwise it may still be locked and we get "Permission denied" errors.
            os.rename(INSTALL_PREFIX, tmp_dir)
            shutil.rmtree(tmp_dir)
        os.makedirs(INSTALL_PREFIX, exist_ok=True)

        # Determine packages to install.
        # Since we just cleaned all packages installed by us, including those that didn't need
        # a re-install, re-install those as well.
        installed_local = [dep for dep, is_local in installed if is_local]
        req_specs = []
        for dep in needs_install + installed_local:
            if dep.install.startswith('http'):
                req_specs.append(dep.install)
            else:
                req_specs.append('{}=={}'.format(dep.name, dep.install))

        # Locate python in order to invoke pip.
        python = os.path.join(sysconfig.get_path('scripts'), 'python3')

        # Handle the special Python environment bundled with QGIS on Windows.
        try:
            import qgis
        except:
            # NOTE(review): bare except — also swallows KeyboardInterrupt;
            # presumably intentional best-effort probing.
            qgis = None
        if os.name == 'nt' and qgis:
            # sys.executable will be one of two things:
            # within QGIS: C:\Program Files\QGIS 3.0\bin\qgis-bin-g7.4.0.exe
            # within python-qgis.bat: C:\PROGRA~1\QGIS 3.0\apps\Python36\python.exe
            exe_path = sys.executable
            exe_dir = os.path.dirname(exe_path)
            if os.path.basename(exe_path) == 'python.exe':
                python_qgis_dir = os.path.join(exe_dir, os.pardir, os.pardir, 'bin')
            else:
                python_qgis_dir = exe_dir
            python = os.path.abspath(os.path.join(python_qgis_dir, 'python-qgis.bat'))
            if not os.path.isfile(python):
                python = os.path.abspath(os.path.join(python_qgis_dir, 'python-qgis-ltr.bat'))

        # Must use a single pip install invocation, otherwise dependencies of newly
        # installed packages get re-installed and we couldn't pin versions.
        # E.g. 'pip install pandas==0.20.3' will install pandas, but doing
        # 'pip install xarray==0.10.0' after that would re-install pandas (latest version)
        # as it's a dependency of xarray.
        # This is all necessary due to limitations of pip's --prefix option.
        args = [python, '-m', 'pip', 'install', '--prefix', INSTALL_PREFIX] + req_specs
        yield ('log', ' '.join(args))
        for line in run_subprocess(args, LOG_PATH):
            yield ('log', line)

    yield ('install_done', None)

    if cannot_update:
        # NOTE(review): this yields the whole cannot_update list once per
        # entry — confirm whether per-dep payloads were intended.
        for dep, _ in cannot_update:
            yield ('cannot_update', cannot_update)
# flake8: noqa import pkg_resources __version__ = pkg_resources.get_distribution("librtd").version import nimporter from librtdpy import ( main ) from docopt import docopt # from cli import main docstring = """Return time distribution (RTD) calculation. Takes input FASTA files and outputs a line-delimited JSON (.jsonl) file containing the RTD for each k-mer. If no output file is specified, it will be written to stdout. All log messages are written to stderr. Usage: rtd <k> <input> [<output>] [--reverse-complement|--pairwise] rtd (-h | --help) rtd --version Options: -r, --reverse-complement Whether to compute distances to reverse complement k-mers -p, --pairwise Whether to compute the distances between every pair of k-mers -h, --help Show this screen. --version Show version. """
try:
    # pylint: disable=unexpected-keyword-arg,no-value-for-parameter
    cli(prog_name='ceph-bootstrap')
except CephBootstrapException as ex:
    logger.exception(ex)
    click.echo(str(ex))


@click.group()
@click.option('-l', '--log-level', default='info',
              type=click.Choice(["info", "error", "debug", "silent"]),
              help="set log level (default: info)")
@click.option('--log-file', default='/var/log/ceph-bootstrap.log',
              type=click.Path(dir_okay=False),
              help="the file path for the log to be stored")
# Bug fix: version_option expects a version *string*; previously the
# Distribution object itself was passed, so "%(version)s" rendered
# "ceph-bootstrap X.Y" instead of just the version.  (Matches how the
# other CLI in this file does it.)
@click.version_option(pkg_resources.get_distribution('ceph-bootstrap').version,
                      message="%(version)s")
def cli(log_level, log_file):
    _setup_logging(log_level, log_file)


@cli.command(name='config')
@click.argument('config_args', nargs=-1, type=click.UNPROCESSED, required=False)
def config_shell(config_args):
    """
    Starts ceph-bootstrap configuration shell
    """
    # With arguments, run them as a one-shot command; otherwise open an
    # interactive shell.
    if config_args:
        run_config_cmdline(" ".join(config_args))
    else:
        run_config_shell()
print( jh.color( 'An uncaught exception was raised. Check the log file at:\n{}' .format('storage/logs/live-trade.txt'), 'red')) elif jh.is_collecting_data(): print( jh.color( 'An uncaught exception was raised. Check the log file at:\n{}' .format('storage/logs/collect.txt'), 'red')) threading.excepthook = handle_thread_exception # create a Click group @click.group() @click.version_option(pkg_resources.get_distribution("jesse").version) def cli(): """ """ pass @cli.command() @click.argument('exchange', required=True, type=str) @click.argument('symbol', required=True, type=str) @click.argument('start_date', required=True, type=str) def import_candles(exchange, symbol, start_date): """ imports historical candles from exchange """
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.vision_v1p3beta1.types import product_search_service
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore

# Advertise the installed client-library version in request metadata; fall
# back to an unversioned ClientInfo when running from a source checkout.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-vision",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


class ProductSearchTransport(abc.ABC):
    """Abstract transport class for ProductSearch."""

    # OAuth scopes requested for every ProductSearch RPC.
    AUTH_SCOPES = (
        "https://www.googleapis.com/auth/cloud-platform",
        "https://www.googleapis.com/auth/cloud-vision",
    )

    # Default API endpoint used when the caller supplies no host.
    DEFAULT_HOST: str = "vision.googleapis.com"
import pkg_resources
import warnings

from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.gapic_v1.routing_header
import google.api_core.grpc_helpers
import google.api_core.path_template

from google.ads.google_ads.v2.services import user_interest_service_client_config
from google.ads.google_ads.v2.services.transports import user_interest_service_grpc_transport
from google.ads.google_ads.v2.proto.services import user_interest_service_pb2

# Client-library version read from the installed google-ads distribution;
# raises DistributionNotFound if the package is not installed.
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution('google-ads', ).version


class UserInterestServiceClient(object):
    """Service to fetch Google Ads User Interest."""

    SERVICE_ADDRESS = 'googleads.googleapis.com:443'
    """The default address of the service."""

    # The name of the interface for this client. This is the key used to
    # find the method configuration in the client_config dictionary.
    _INTERFACE_NAME = 'google.ads.googleads.v2.services.UserInterestService'

    @classmethod
    def from_service_account_file(cls, filename, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
import pkg_resources

import google.auth  # type: ignore
from google.api_core import gapic_v1
from google.auth import credentials as ga_credentials  # type: ignore

from google.ads.googleads.v9.resources.types import (
    ad_group_criterion_simulation,
)
from google.ads.googleads.v9.services.types import (
    ad_group_criterion_simulation_service,
)

# Advertise the installed client-library version in request metadata;
# unversioned fallback when the google-ads distribution is not installed.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-ads",).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


class AdGroupCriterionSimulationServiceTransport(metaclass=abc.ABCMeta):
    """Abstract transport class for AdGroupCriterionSimulationService."""

    # OAuth scope required for Google Ads API calls.
    AUTH_SCOPES = ("https://www.googleapis.com/auth/adwords",)

    def __init__(
        self,
        *,
        host: str = "googleads.googleapis.com",
        credentials: ga_credentials.Credentials = None,
import logging # Define Version import pkg_resources version = pkg_resources.get_distribution(__package__).version try: from DmpWorkflow.config.defaults import DAMPE_LOGFILE from DmpWorkflow.utils.logger import initLogger initLogger(DAMPE_LOGFILE) from DmpWorkflow.utils.logger import logger_batch, logger_script, logger_core except Exception as err: logging.warning("Log service client was not initialized properly: %s", str(err))
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import google.api_core.page_iterator
import google.api_core.path_template

from google.cloud.bigquery_datatransfer_v1.gapic import data_transfer_service_client_config
from google.cloud.bigquery_datatransfer_v1.gapic import enums
from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2
from google.cloud.bigquery_datatransfer_v1.proto import transfer_pb2
from google.protobuf import field_mask_pb2
from google.protobuf import timestamp_pb2

# Client-library version read from the installed distribution's metadata;
# reported to the API in request headers.
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
    'google-cloud-bigquery-datatransfer', ).version


class DataTransferServiceClient(object):
    """
    The Google BigQuery Data Transfer Service API enables BigQuery users to
    configure the transfer of their data from other Google Products into
    BigQuery. This service contains methods that are end user exposed. It backs
    up the frontend.
    """

    SERVICE_ADDRESS = 'bigquerydatatransfer.googleapis.com:443'
    """The default address of the service."""

    # The scopes needed to make gRPC calls to all of the methods defined in
    # this service
# encoding: utf-8 from collective.eeafaceted.z3ctable.columns import PrettyLinkColumn import pkg_resources HAS_CONTACT_CORE = True try: pkg_resources.get_distribution('collective.contact.core') from collective.contact.core.content.organization import IOrganization except pkg_resources.DistributionNotFound: HAS_CONTACT_CORE = False class ContactPrettyLinkColumn(PrettyLinkColumn): attrName = 'get_full_title' params = { 'showContentIcon': True, 'target': '_blank', 'additionalCSSClasses': ['link-tooltip'], 'display_tag_title': False } def contentValue(self, item): """ """ return getattr(item, self.attrName)() def getCSSClasses(self, item): """Returns a CSS class specific to current content.""" cssClasses = super(ContactPrettyLinkColumn, self).getCSSClasses(item)
import pkg_resources

# When the optional pysha3 package is available, alias its Keccak-256
# implementation over sha3.sha3_256 so downstream code gets Keccak semantics.
try:
    from sha3 import keccak_256  # NOQA
    import sha3
except ImportError:
    # pysha3 is not installed; leave hashing untouched.
    pass
else:
    sha3.sha3_256 = sha3.keccak_256

# Version of the installed eth-testrpc distribution.
__version__ = pkg_resources.get_distribution("eth-testrpc").version
import requests
import pkg_resources
from django.conf import settings

# Installed warapidpro distribution; its version is embedded in User-Agents.
distribution = pkg_resources.get_distribution('warapidpro')


def session_for_warapidpro():
    """Return a requests session tagged with the auth-setup User-Agent."""
    user_agent = 'warapidpro/%s (%s, %s)' % (
        distribution.version,
        "[Auth Setup]",
        settings.HOSTNAME)
    session = requests.Session()
    session.headers['User-Agent'] = user_agent
    return session


def session_for_channel(channel):
    """Return a requests session whose User-Agent identifies the channel."""
    org_label = channel.org.name if channel.org else 'Unknown Org'
    channel_label = '%s/%s' % (channel.channel_type, channel.pk)
    user_agent = 'warapidpro/%s (%s, %s, %s)' % (
        distribution.version,
        org_label,
        channel_label,
        settings.HOSTNAME)
    session = requests.Session()
    session.headers['User-Agent'] = user_agent
    return session
# Fragment: tail of an async RPC wrapper method (the enclosing method and
# class begin before this chunk).
            ),
            default_timeout=120.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(
            (("name", request.name), )), )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response


# Advertise the installed client-library version in request metadata; fall
# back to an unversioned ClientInfo when not installed as a distribution.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-game-servers",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


__all__ = ("GameServerDeploymentsServiceAsyncClient", )
# The encoding of source files. #source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'django-authority' copyright = u'2009-2019, Jannis Leidel' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The full version, including alpha/beta/rc tags. release = get_distribution('django-authority').version # The short X.Y version. version = '.'.join(release.split('.')[:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = []
# manually mess with the local namespace so the linter can't know that some # identifiers actually exist in the namespace. # Must be at the top, to avoid circular dependency. from streamlit import logger as _logger from streamlit import config as _config _LOGGER = _logger.get_logger("root") # Give the package a version. import pkg_resources as _pkg_resources from typing import Any, List, Tuple, Type # This used to be pkg_resources.require('streamlit') but it would cause # pex files to fail. See #394 for more details. __version__ = _pkg_resources.get_distribution("streamlit").version import contextlib as _contextlib import re as _re import sys as _sys import textwrap as _textwrap import threading as _threading import traceback as _traceback import urllib.parse as _parse from streamlit import code_util as _code_util from streamlit import env_util as _env_util from streamlit import source_util as _source_util from streamlit import string_util as _string_util from streamlit.delta_generator import DeltaGenerator as _DeltaGenerator from streamlit.report_thread import add_report_ctx as _add_report_ctx
# Copyright 2016 Dravetech AB. All rights reserved. # # The contents of this file are licensed under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with the # License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. """napalm-cumulus package.""" import pkg_resources from napalm_cumulus.cumulus import CumulusDriver try: __version__ = pkg_resources.get_distribution("napalm-cumulus").version except pkg_resources.DistributionNotFound: __version__ = "Not installed" __all__ = ["CumulusDriver"]
# The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'python-lz4' copyright = u'2016, 2017, 2018 python-lz4 developers' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # from pkg_resources import get_distribution release = get_distribution('lz4').version version = release # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build']
from pkg_resources import get_distribution, DistributionNotFound

from . import cli

# Package metadata.
__project__ = 'cryptotvgen'
__author__ = 'Ekawat (Ice) Homsirikamol and William Diehl'
__package__ = 'cryptotvgen'

# Report the installed distribution's version; fall back to a placeholder
# when running from a source checkout that was never pip-installed.
try:
    __version__ = get_distribution(__project__).version
except DistributionNotFound:
    __version__ = '(N/A - Local package)'
from pkg_resources import get_distribution, DistributionNotFound
import xml.etree.ElementTree as ET
from ctypes import *

# pathlib is optional on very old interpreters; paths are handled as plain
# strings when it is unavailable.
try:
    import pathlib
except ImportError:
    pathlib = None

# Python 2/3 compatibility shim for URL parsing.
# NOTE(review): `sys` is used here but its import is outside this chunk —
# confirm it is imported earlier in the file.
if sys.version_info < (3, ):
    import urlparse
else:
    import urllib.parse as urlparse

# __version__ is deliberately left unset when pymediainfo is not installed
# as a distribution.
try:
    __version__ = get_distribution("pymediainfo").version
except DistributionNotFound:
    pass


class Track(object):
    """
    An object associated with a media file track.

    Each :class:`Track` attribute corresponds to attributes parsed from
    MediaInfo's output. All attributes are lower case. Attributes that are
    present several times such as Duration yield a second attribute starting
    with `other_` which is a list of all alternative attribute values.

    When a non-existing attribute is accessed, `None` is returned.

    Example:
from pkg_resources import get_distribution

from .exceptions import FirmwareVersionError, SpiConnectionError
from .decorators import requires_firmware
from .lookup_table import OPC_LOOKUP
from time import sleep

import spidev
import struct
import warnings
import re

from .exceptions import firmware_error_msg

# Public API of this module; version comes from the installed distribution.
__all__ = ['OPCN2', 'OPCN1']
__version__ = get_distribution('py-opc').version


class OPC(object):
    """Generic class for any Alphasense OPC. Provides the common methods and calculations for each OPC.

    :param spi_connection: spidev.SpiDev connection
    :param debug: Set true to print data to console while running
    :param model: Model number of the OPC ('N1' or 'N2') set by the parent class

    :raises: opc.exceptions.SpiConnectionError

    :type spi_connection: spidev.SpiDev
    :type debug: boolean
    :type model: string
# Fragment: tail of a synchronous RPC method (presumably
# RegionOperationsClient.wait, per __all__ below) — the enclosing method
# begins before this chunk.
        request.project = project
        if region is not None:
            request.region = region
        if operation is not None:
            request.operation = operation

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.wait]

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response


# Advertise the installed client-library version in request metadata; fall
# back to an unversioned ClientInfo when not installed as a distribution.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-compute",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


__all__ = ("RegionOperationsClient", )
def __version__(exit_code):
    """Print the installed CellProfiler version, then terminate.

    :param exit_code: process exit status passed through to ``sys.exit``.
    """
    installed = pkg_resources.get_distribution("CellProfiler")
    print(installed.version)
    sys.exit(exit_code)
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources

import google.auth  # type: ignore
import google.api_core  # type: ignore
from google.api_core import exceptions as core_exceptions  # type: ignore
from google.api_core import gapic_v1  # type: ignore
from google.api_core import retry as retries  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore

from google.cloud.compute_v1.types import compute

# Advertise the installed client-library version in request metadata; fall
# back to an unversioned ClientInfo when not installed as a distribution.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-compute",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

# Detect the installed google-auth version, preferring the module attribute
# and falling back to distribution metadata; None when undeterminable.
try:
    # google.auth.__version__ was added in 1.26.0
    _GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
    try:
        # try pkg_resources if it is available
        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution(
            "google-auth").version
    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
        _GOOGLE_AUTH_VERSION = None

_API_CORE_VERSION = google.api_core.__version__
rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_validation_result, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-dialogflow", ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("AgentsAsyncClient",)
source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General substitutions. project = __DISTRIBUTION__ copyright = __COPYRIGHT__ # The default replacements for |version| and |release|, also used in various # other places throughout the built documents. # # The short X.Y version. version = __VERSION__ # The full version, including alpha/beta/rc tags. release = pkg_resources.get_distribution(__DISTRIBUTION__).version # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True
# Fragment: middle of a macOS app packaging script (both ends continue
# outside this chunk); indentation below is reconstructed.
dmgLocation = os.path.join(distLocation, appName)
pythonVersion = "python%s.%i" % (sys.version_info[0], sys.version_info[1])
pythonLibPath = os.path.join(resourcesPath, "lib", pythonVersion)

# "-A" (py2app alias mode) skips the bundled-resource fix-ups.
if "-A" not in sys.argv:
    # make sure the external tools have the correct permissions
    externalTools = ("ffmpeg", "gifsicle", "mkbitmap", "potrace")
    for externalTool in externalTools:
        externalToolPath = os.path.join(resourcesPath, externalTool)
        os.chmod(externalToolPath, 0o775)
    # Copy dist metadata into the bundle so pkg_resources can resolve these
    # packages at runtime. See:
    # https://bitbucket.org/ronaldoussoren/py2app/issues/256/fs-module-not-fully-working-from-app
    # https://github.com/PyFilesystem/pyfilesystem2/issues/228
    for pkgName in ["fs", "appdirs", "pytz", "six", "setuptools"]:
        infoPath = pkg_resources.get_distribution(pkgName).egg_info
        baseInfoName = os.path.basename(infoPath)
        shutil.copytree(infoPath, os.path.join(pythonLibPath, baseInfoName))

if runTests:
    # Run the bundled app's test script and capture its combined output.
    appExecutable = os.path.join(appLocation, "Contents", "MacOS", appName)
    runAllTestsPath = os.path.join(drawBotRoot, "tests", "runAllTests.py")
    commands = [appExecutable, "--testScript=%s" % runAllTestsPath]
    print("Running DrawBot tests...")
    process = subprocess.Popen(commands, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
    stdout, stderr = process.communicate()
    lines = stdout.splitlines()
    # Find where the test script's own output starts.
    for startTestOutputIndex, line in enumerate(lines):
        if line.endswith(" starting test script"):
            break
    if startTestOutputIndex != 0:
import pkg_resources

# Resolve an indexing-queue implementation across Plone versions:
# Plone 5 ships it in Products.CMFCore; Plone 4 needs collective.indexing;
# otherwise processQueue degrades to a no-op.
HAS_INDEXING = False
try:
    # Plone 5
    from Products.CMFCore.indexing import processQueue
    from Products.CMFCore.indexing import getQueue
except ImportError:
    try:
        # Plone 4 with collective.indexing
        pkg_resources.get_distribution('collective.indexing')
    except pkg_resources.DistributionNotFound:
        def processQueue():
            # Plone 4 without collective.indexing
            pass
    else:
        from collective.indexing.queue import getQueue
        from collective.indexing.queue import processQueue
        HAS_INDEXING = True
else:
    HAS_INDEXING = True

if HAS_INDEXING:
    from ftw.upgrade.interfaces import IDuringUpgrade
    from ftw.upgrade.progresslogger import ProgressLogger
    from zope.globalrequest import getRequest


class LoggingQueueProcessor(object):
    """Queue processor to log collective.indexing progress.