infer_doublets, mark_doublets, ) from .annotate_cluster import infer_cell_types, annotate, infer_cluster_names from .misc import search_genes, search_de_genes, find_outlier_clusters from .plotting import ( scatter, compo_plot, scatter_groups, violin, heatmap, dotplot, dendrogram, hvfplot, qcviolin, volcano, rank_plot, ridgeplot, ) try: from importlib.metadata import version, PackageNotFoundError except ImportError: # < Python 3.8: Use backport module from importlib_metadata import version, PackageNotFoundError try: __version__ = version('pegasuspy') del version except PackageNotFoundError: pass
def test_version():
    """Verify the version declared in packaging metadata matches __version__."""
    package_version = importlib_metadata.version("pgp_milter")
    assert __version__ == package_version
async def format_and_send(ctx: commands.Context, obj: Any, *, is_owner: bool = False) -> None: source = obj if isinstance(obj, commands.Cog): source = type(obj) elif isinstance(obj, commands.Command): source = obj.callback if not source.__module__: # probably some kind of custom-coded command if is_owner: return await ctx.invoke( ctx.bot.get_command("instantcmd source"), command=obj.qualified_name ) else: raise OSError elif isinstance(obj, property): source = obj.fget elif isinstance(obj, (discord.utils.cached_property, discord.utils.CachedSlotProperty)): source = obj.function try: lines, line = inspect.getsourcelines(source) source_file = inspect.getsourcefile(source) except TypeError: if isinstance(source, type): raise source = type(source) lines, line = inspect.getsourcelines(source) source_file = inspect.getsourcefile(source) module = getattr(inspect.getmodule(source), "__name__", None) if source_file and module and source_file.endswith("__init__.py"): full_module = f"{module}.__init__" else: full_module = module is_installed = False header: str = "" if full_module: if full_module.startswith("discord."): is_installed = True if discord.__version__[-1].isdigit(): dpy_commit = "v" + discord.__version__ else: try: dpy_version = version("discord.py").split("+g") except PackageNotFoundError: dpy_commit = "master" else: dpy_commit = dpy_version[1] if len(dpy_version) == 2 else "master" header = f"<https://github.com/Rapptz/discord.py/blob/{dpy_commit}/{full_module.replace('.', '/')}.py#L{line}-L{line + len(lines) - 1}>" elif full_module.startswith("redbot."): is_installed = True if "dev" in redbot.__version__: red_commit = "V3/develop" else: red_commit = redbot.__version__ header = f"<https://github.com/Cog-Creators/Red-DiscordBot/blob/{red_commit}/{full_module.replace('.', '/')}.py#L{line}-L{line + len(lines) - 1}>" elif dl := ctx.bot.get_cog("Downloader"): is_installed, installable = await dl.is_installed(full_module.split(".")[0]) if is_installed: if 
installable.repo is None: is_installed = False else: url = yarl.URL(installable.repo.url) if url.user or url.password: is_installed = False header = f"<{installable.repo.clean_url.rstrip('/')}/blob/{installable.commit}/{full_module.replace('.', '/')}.py#L{line}-L{line + len(lines) - 1}>"
# Prefer the stdlib importlib.metadata (Python 3.8+); fall back to the
# importlib-metadata backport on older interpreters.
try:
    from importlib import metadata
except ImportError:
    # Running on pre-3.8 Python; use importlib-metadata package
    import importlib_metadata as metadata

# Guard the lookup: metadata.version() raises PackageNotFoundError when the
# code runs from a source tree that was never pip-installed. Fall back to a
# placeholder instead of crashing at import time (matches sibling packages).
try:
    __version__ = metadata.version("idesolver")
except metadata.PackageNotFoundError:
    __version__ = "unknown"
t.Tuple[ t.Literal["CUDAExecutionProvider"], t.Union[t.Dict[str, t.Union[int, str, bool]], str], ] ] try: import importlib.metadata as importlib_metadata except ImportError: import importlib_metadata _PACKAGE = ["onnxruntime", "onnxruntime-gpu"] for p in _PACKAGE: try: _onnxruntime_version = importlib_metadata.version(p) break except importlib_metadata.PackageNotFoundError: pass MODULE_NAME = "bentoml.onnx" logger = logging.getLogger(__name__) # helper methods def _yield_providers( iterable: t.Sequence[t.Any], ) -> t.Generator[str, None, None]: # pragma: no cover if isinstance(iterable, tuple): yield iterable[0]
#!/usr/bin/env python # flake8: noqa try: import importlib.metadata as importlib_metadata except ModuleNotFoundError: import importlib_metadata __version__ = importlib_metadata.version(__name__) import sys # Importing the config module should be the first thing we do, since other # modules depend on the global config dict for initialization. from ._config import * # Workaround to set the renderer passed via CLI args *before* importing # Manim's classes (as long as the metaclass approach for switching # between OpenGL and cairo rendering is in place, classes depend # on the value of config.renderer). for i, arg in enumerate(sys.argv): if arg.startswith("--renderer"): if "=" in arg: _, parsed_renderer = arg.split("=") else: parsed_renderer = sys.argv[i + 1] config.renderer = parsed_renderer elif arg == "--use_opengl_renderer": config.renderer = "opengl" elif arg == "--use_webgl_renderer":
# importlib.metadata is in the standard library from Python 3.8; older
# interpreters fall back to the importlib-metadata backport package.
try:  # Python 3.8
    from importlib import metadata
except ImportError:
    import importlib_metadata as metadata

# Report "unknown" when the package is not pip-installed (e.g. running
# straight from a source checkout), rather than crashing at import time.
try:
    __version__ = metadata.version("meshzoo")
except Exception:
    __version__ = "unknown"
from .subs_info import SubsInfo from .util import echo_json from .util import echo_status from .util import echo_warning from .util import hunt_subtitles from .util import is_ipaddress CONFIG_DIR = Path(click.get_app_dir("catt")) CONFIG_PATH = Path(CONFIG_DIR, "catt.cfg") STATE_PATH = Path(CONFIG_DIR, "state.json") WAIT_PLAY_TIMEOUT = 30 PROGRAM_NAME = "catt" try: VERSION = version(PROGRAM_NAME) except Exception: VERSION = "0.0.0u" class CattTimeParamType(click.ParamType): def convert(self, value, param, ctx): try: tdesc = [int(x) for x in value.split(":")] tlen = len(tdesc) if (tlen > 1 and any(t > 59 for t in tdesc)) or tlen > 3: raise ValueError except ValueError: self.fail("{} is not a valid time description.".format(value)) tdesc.reverse()
def test_version(flake8dir):
    """Plugin reports its own distribution version via ``flake8 --version``.

    Ensures the version baked into the plugin matches the installed
    distribution metadata.
    """
    result = flake8dir.run_flake8(["--version"])
    # re.escape: the version string contains dots, which are regex wildcards
    # and would otherwise match any character (e.g. "1.2.3" matching "1x2y3").
    version_regex = r"flake8-tidy-imports:( )*" + re.escape(
        version("flake8-tidy-imports"))
    unwrapped = "".join(result.out_lines)
    assert re.search(version_regex, unwrapped)
def test_zip_version(self):
    # Metadata remains readable for a zipped distribution fixture —
    # presumably installed as a zip/egg; confirm against the test fixtures.
    self.assertEqual(version('example'), '21.12')
def test_case_insensitive(self):
    # Distribution-name lookup is case-insensitive: 'Example' resolves the
    # same distribution as 'example'.
    self.assertEqual(version('Example'), '21.12')
def get_version(name: str) -> str:
    """Return the installed distribution version for *name*, or '' if absent."""
    try:
        found = version(name)
    except PackageNotFoundError:
        found = ''
    return found
def test_retrieves_version_of_distinfo_pkg(self):
    # version() returns a plain string for a .dist-info style package.
    pkg_version = version('distinfo-pkg')
    assert isinstance(pkg_version, str)
    # self.version_pattern is provided by the test class / fixture mixin.
    assert re.match(self.version_pattern, pkg_version)
def test_version_egg_info_file(self):
    # Legacy .egg-info (file form) metadata is also readable by version().
    self.assertEqual(version('egginfo-file'), '0.1')
import sys

if sys.version_info[:2] >= (3, 8):
    # TODO: Import directly (no need for conditional) when `python_requires = >= 3.8`
    from importlib.metadata import PackageNotFoundError, version  # pragma: no cover
else:
    from importlib_metadata import PackageNotFoundError, version  # pragma: no cover

try:
    # Change here if project is renamed and does not equal the package name
    dist_name = "universal-devkit"
    __version__ = version(dist_name)
except PackageNotFoundError:  # pragma: no cover
    # Not pip-installed (e.g. running from a plain source checkout).
    __version__ = "unknown"
finally:
    # Keep the module namespace clean; these names were only needed here.
    del version, PackageNotFoundError
#!/usr/bin/env python3
# -*- coding: utf-8, vim: expandtab:ts=4 -*-

from importlib import metadata

# metadata.version() raises PackageNotFoundError when the distribution is not
# installed. Catch that precise type: the previous ModuleNotFoundError clause
# only worked because PackageNotFoundError happens to subclass it, which is
# non-obvious; naming the exact exception documents the intent.
try:
    __version__ = metadata.version('mthasher')
except metadata.PackageNotFoundError:
    __version__ = 'THIS IS NOT A PACKAGE!'

if __name__ == '__main__':
    print(__version__)
def main() -> None:
    """CLI entry point: parse args, assemble config, and start the connector."""
    # Compose a multi-line version banner covering our own and key dependency versions.
    pycti_ver: str = version("pycti")
    elastic_ver: str = version("elasticsearch")
    my_version: str = (f"elastic {__version__}\n"
                       f"pyopencti {pycti_ver}\n"
                       f"elasticsearch {elastic_ver}\n")
    arguments: dict = docopt(__doc__, version=my_version)

    # Map -v / -q flags onto a numeric logging verbosity.
    _verbosity: int = 0
    if not arguments["-q"] is True:
        _verbosity = 30 + (arguments["-v"] * -10)
        # If this is set to 0, it defaults to the root logger configuration,
        # which we don't want to manipulate because it will spam from other modules
        if _verbosity == 0:
            _verbosity = 1
    else:
        _verbosity = 40

    _loggername = LOGGER_NAME
    if arguments["--debug"] is True:
        # Enable full logging for all loggers
        _loggername = None
        _verbosity = 10

    setup_logger(verbosity=_verbosity, name=_loggername)
    logger = logging.getLogger(LOGGER_NAME)

    # This can be overridden by environment variables
    # NOTE(review): logger.warn is a deprecated alias of logger.warning.
    f_config: OrderedDict = {}
    if not os.path.exists(arguments["--config"]):
        logger.warn(
            f"""Config file '{arguments["--config"]}' does not exist. Relying on environment and defaults."""
        )
    elif not os.path.isfile(arguments["--config"]):
        logger.warn(
            f"""Config path '{arguments["--config"]}' exists but is not a file. 
Relying on environment and defaults."""
        )
    else:
        f_config = yaml.load(open(arguments["--config"]), Loader=yaml.FullLoader)

    if "connector" not in f_config:
        f_config["connector"] = {}
    # Record the effective log level; verbosity 1 denotes the custom TRACE level.
    f_config["connector"]["log_level"] = (logging.getLevelName(_verbosity)
                                          if _verbosity != 1 else "TRACE")

    # Expand dotted keys ("a.b.c") from the file into a nested config dict.
    config: dict = {}
    for k, v in f_config.items():
        config = add_branch(config, k.split("."), v)
    config = __process_config(arguments, config)

    # Check if we need to update logger config
    if logging.getLevelName(logger.level) != config["connector"]["log_level"]:
        logger.setLevel(config["connector"]["log_level"].upper())

    # trace() is presumably a custom level installed by setup_logger — confirm.
    logger.trace(json.dumps(config, sort_keys=True, indent=4))

    # This can be overridden by environment variables
    datadir = __DATA_DIR__
    if "--data-dir" in arguments and arguments["--data-dir"] is not None:
        if not os.path.exists(arguments["--data-dir"]):
            logger.warn(
                f"""Data directory '{arguments["--data-dir"]}' does not exist."""
            )
        elif not os.path.isdir(arguments["--data-dir"]):
            logger.warn(
                f"""'{arguments["--data-dir"]}' is not a valid directory for --data-dir"""
            )
        else:
            datadir = arguments["--data-dir"]
    else:
        logger.info(f"Using default data directory: {datadir}")

    if not arguments["-q"] is True:
        print(BANNER)

    # If we're using the custom TRACE level, just tell OpenCTI to run as DEBUG
    if config["connector"]["log_level"] == "TRACE":
        os.environ["CONNECTOR_LOG_LEVEL"] = "DEBUG"

    ElasticInstance = ElasticConnector(config=config, datadir=datadir)
    ElasticInstance.start()
    sys.exit(0)
# importlib.metadata is in the standard library from Python 3.8; older
# interpreters fall back to the importlib-metadata backport package.
try:  # Python 3.8
    from importlib import metadata
except ImportError:
    import importlib_metadata as metadata

# Report "unknown" when the package is not pip-installed (e.g. running
# straight from a source checkout), rather than crashing at import time.
try:
    __version__ = metadata.version("perfplot")
except Exception:
    __version__ = "unknown"
class ComprehensionChecker:
    """
    Flake8 plugin to help you write better list/set/dict comprehensions.
    """

    # Plugin identity reported to flake8; version comes from install metadata.
    name = "flake8-comprehensions"
    version = version("flake8-comprehensions")

    def __init__(self, tree, *args, **kwargs):
        # flake8 hands us the parsed AST of the file under check.
        self.tree = tree

    # Message catalogue; {placeholders} are filled per-violation in run().
    messages = {
        "C400": "C400 Unnecessary generator - rewrite as a list comprehension.",
        "C401": "C401 Unnecessary generator - rewrite as a set comprehension.",
        "C402": "C402 Unnecessary generator - rewrite as a dict comprehension.",
        "C403": "C403 Unnecessary list comprehension - rewrite as a set comprehension.",
        "C404": ("C404 Unnecessary list comprehension - rewrite as a dict comprehension."
                 ),
        "C405": "C405 Unnecessary {type} literal - ",
        "C406": "C406 Unnecessary {type} literal - ",
        "C407": "C407 Unnecessary list comprehension - '{func}' can take a generator.",
        "C408": "C408 Unnecessary {type} call - rewrite as a literal.",
        "C409": "C409 Unnecessary {type} passed to tuple() - ",
        "C410": "C410 Unnecessary {type} passed to list() - ",
        "C411": "C411 Unnecessary list call - remove the outer call to list().",
        "C412": "C412 Unnecessary list comprehension - 'in' can take a generator.",
        "C413": "C413 Unnecessary {outer} call around {inner}(){remediation}.",
        "C414": "C414 Unnecessary {inner} call within {outer}().",
        "C415": "C415 Unnecessary subscript reversal of iterable within {func}().",
        "C416": "C416 Unnecessary {type} comprehension - rewrite using {type}().",
    }

    def run(self):
        """Walk the AST and yield flake8 violation tuples
        (lineno, col, message, checker_type) for each match."""
        for node in ast.walk(self.tree):
            if isinstance(node, ast.Call) and isinstance(node.func, ast.Name):
                num_positional_args = len(node.args)
                # C400/C401: list(gen) / set(gen) -> comprehension.
                if (num_positional_args == 1
                        and isinstance(node.args[0], ast.GeneratorExp)
                        and node.func.id in ("list", "set")):
                    msg_key = {"list": "C400", "set": "C401"}[node.func.id]
                    yield (
                        node.lineno,
                        node.col_offset,
                        self.messages[msg_key],
                        type(self),
                    )
                # C402/C404: dict(gen-or-listcomp of 2-tuples) -> dict comprehension.
                elif (num_positional_args == 1
                        and isinstance(node.args[0], (ast.GeneratorExp, ast.ListComp))
                        and isinstance(node.args[0].elt, ast.Tuple)
                        and len(node.args[0].elt.elts) == 2
                        and node.func.id == "dict"):
                    if isinstance(node.args[0], ast.GeneratorExp):
                        msg = "C402"
                    else:
                        msg = "C404"
                    yield (
                        node.lineno,
                        node.col_offset,
                        self.messages[msg],
                        type(self),
                    )
                # C411/C403: list(listcomp) / set(listcomp).
                elif (num_positional_args == 1
                        and isinstance(node.args[0], ast.ListComp)
                        and node.func.id in ("list", "set")):
                    msg_key = {"list": "C411", "set": "C403"}[node.func.id]
                    yield (
                        node.lineno,
                        node.col_offset,
                        self.messages[msg_key],
                        type(self),
                    )
                # C409/C410 (literal of same type): tuple((...)) / list([...]).
                elif num_positional_args == 1 and (
                        isinstance(node.args[0], ast.Tuple)
                        and node.func.id == "tuple"
                        or isinstance(node.args[0], ast.List)
                        and node.func.id == "list"):
                    suffix = "remove the outer call to {func}()."
                    msg_key = {"tuple": "C409", "list": "C410"}[node.func.id]
                    msg = self.messages[msg_key] + suffix
                    yield (
                        node.lineno,
                        node.col_offset,
                        msg.format(type=type(node.args[0]).__name__.lower(),
                                   func=node.func.id),
                        type(self),
                    )
                # C405/C406/C409/C410 (literal of the other type).
                elif (num_positional_args == 1
                        and isinstance(node.args[0], (ast.Tuple, ast.List))
                        and node.func.id in ("tuple", "list", "set", "dict")):
                    suffix = "rewrite as a {func} literal."
                    msg_key = {
                        "tuple": "C409",
                        "list": "C410",
                        "set": "C405",
                        "dict": "C406",
                    }[node.func.id]
                    msg = self.messages[msg_key] + suffix
                    yield (
                        node.lineno,
                        node.col_offset,
                        msg.format(type=type(node.args[0]).__name__.lower(),
                                   func=node.func.id),
                        type(self),
                    )
                # C407: these builtins accept a generator directly.
                elif (
                        num_positional_args == 1
                        and isinstance(node.args[0], ast.ListComp)
                        and node.func.id in (
                            "all",
                            "any",
                            "frozenset",
                            "tuple",
                            # These take 1 positional argument + some keyword arguments
                            "max",
                            "min",
                            "sorted",
                        )):
                    yield (
                        node.lineno,
                        node.col_offset,
                        self.messages["C407"].format(func=node.func.id),
                        type(self),
                    )
                elif (num_positional_args in (1, 2)
                        and isinstance(node.args[0], ast.ListComp)
                        and node.func.id in (
                            # These can take a second positional argument
                            "enumerate",
                            "sum",
                        )):
                    yield (
                        node.lineno,
                        node.col_offset,
                        self.messages["C407"].format(func=node.func.id),
                        type(self),
                    )
                # C408: bare tuple()/list()/dict() calls -> literals.
                elif (num_positional_args == 0
                        and not has_star_args(node)
                        and not has_keyword_args(node)
                        and node.func.id in ("tuple", "list", "dict")):
                    yield (
                        node.lineno,
                        node.col_offset,
                        self.messages["C408"].format(type=node.func.id),
                        type(self),
                    )
                # C413: list(sorted(...)) / reversed(sorted(...)).
                elif (node.func.id in {"list", "reversed"}
                        and num_positional_args > 0
                        and isinstance(node.args[0], ast.Call)
                        and isinstance(node.args[0].func, ast.Name)
                        and node.args[0].func.id == "sorted"):
                    remediation = ""
                    if node.func.id == "reversed":
                        # Work out what reverse= was passed to sorted() so we
                        # can suggest the flipped value.
                        reverse_flag_value = False
                        for keyword in node.args[0].keywords:
                            if keyword.arg != "reverse":
                                continue
                            if isinstance(keyword.value, ast.NameConstant):
                                reverse_flag_value = keyword.value.value
                            elif isinstance(keyword.value, ast.Num):
                                reverse_flag_value = bool(keyword.value.n)
                            else:
                                # Complex value
                                reverse_flag_value = None
                        if reverse_flag_value is None:
                            remediation = " - toggle reverse argument to sorted()"
                        else:
                            remediation = " - use sorted(..., reverse={!r})".format(
                                not reverse_flag_value)
                    msg = self.messages["C413"].format(
                        inner=node.args[0].func.id,
                        outer=node.func.id,
                        remediation=remediation,
                    )
                    yield (
                        node.lineno,
                        node.col_offset,
                        msg,
                        type(self),
                    )
                # C414: redundant inner call whose result the outer call
                # would produce anyway (e.g. sorted(list(x))).
                elif (num_positional_args > 0
                        and isinstance(node.args[0], ast.Call)
                        and isinstance(node.args[0].func, ast.Name)
                        and ((node.func.id in {"set", "sorted"}
                              and node.args[0].func.id in {"list", "reversed",
                                                           "sorted", "tuple"})
                             or (node.func.id in {"list", "tuple"}
                                 and node.args[0].func.id in {"list", "tuple"})
                             or (node.func.id == "set"
                                 and node.args[0].func.id == "set"))):
                    yield (
                        node.lineno,
                        node.col_offset,
                        self.messages["C414"].format(
                            inner=node.args[0].func.id, outer=node.func.id),
                        type(self),
                    )
                # C415: reversed/set/sorted applied to x[::-1].
                elif (node.func.id in {"reversed", "set", "sorted"}
                        and num_positional_args > 0
                        and isinstance(node.args[0], ast.Subscript)
                        and isinstance(node.args[0].slice, ast.Slice)
                        and node.args[0].slice.lower is None
                        and node.args[0].slice.upper is None
                        and isinstance(node.args[0].slice.step, ast.UnaryOp)
                        and isinstance(node.args[0].slice.step.op, ast.USub)
                        and isinstance(node.args[0].slice.step.operand, ast.Num)
                        and node.args[0].slice.step.operand.n == 1):
                    yield (
                        node.lineno,
                        node.col_offset,
                        self.messages["C415"].format(func=node.func.id),
                        type(self),
                    )
            # C412: `x in [listcomp]` — membership can use a generator.
            elif isinstance(node, ast.Compare):
                if (len(node.ops) == 1
                        and isinstance(node.ops[0], ast.In)
                        and len(node.comparators) == 1
                        and isinstance(node.comparators[0], ast.ListComp)):
                    yield (
                        node.lineno,
                        node.col_offset,
                        self.messages["C412"],
                        type(self),
                    )
            # C416: identity comprehension [x for x in it] -> list(it)/set(it).
            elif isinstance(node, (ast.ListComp, ast.SetComp)):
                if (len(node.generators) == 1
                        and not node.generators[0].ifs
                        and not is_async_generator(node.generators[0])
                        and (isinstance(node.elt, ast.Name)
                             and isinstance(node.generators[0].target, ast.Name)
                             and node.elt.id == node.generators[0].target.id)):
                    lookup = {ast.ListComp: "list", ast.SetComp: "set"}
                    yield (
                        node.lineno,
                        node.col_offset,
                        self.messages["C416"].format(
                            type=lookup[node.__class__]),
                        type(self),
                    )
# General information about the project. project = u'pyOptimalEstimation' copyright = u'2021, M. Maahn' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # try: from importlib.metadata import version except ImportError: # for Pyton 3.6 and 3.7 from importlib_metadata import version release = version('pyOptimalEstimation') version = '.'.join(release.split('.')[:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build']
try:
    try:
        from importlib import metadata as importlib_metadata  # py3.8+ stdlib
    except ImportError:
        import importlib_metadata  # py3.7- shim
    __version__ = importlib_metadata.version(__package__)
except ImportError:
    # No importlib_metadata. This shouldn't normally happen, but some people prefer not installing
    # packages via pip at all, instead using PYTHONPATH directly or copying the package files into
    # `lib/pythonX.Y/site-packages`. Although not a recommended way, we still try to support it.
    # NOTE(review): PackageNotFoundError subclasses ImportError, so an
    # uninstalled package is also caught here and reported as "unknown".
    __version__ = "unknown"  # :nocov:

from .util import *
from .data_structure import *
from .stream import *
from .vendor import *
from .soc import *
from .platform import *
from .cores import *
############################################## # The MIT License (MIT) # Copyright (c) 2016 Kevin Walchko # see LICENSE for full details ############################################## # from ins_nav.ahrs import AHRS # from ins_nav.compass import TiltCompensatedCompass try: from importlib.metadata import version # type: ignore except ImportError: from importlib_metadata import version # type: ignore __author__ = 'Kevin J. Walchko' __license__ = 'MIT' __version__ = version("ins_nav")
# Backport for Python < 3.8 from importlib_metadata import version try: from pint_pandas import PintArray, PintType del PintType del PintArray _HAS_PINT_PANDAS = True except ImportError: _HAS_PINT_PANDAS = False _, _pint_pandas_error, _ = sys.exc_info() try: # pragma: no cover __version__ = version("pint") except Exception: # pragma: no cover # we seem to have a local copy not installed without setuptools # so the reported version will be unknown __version__ = "unknown" #: A Registry with the default units and constants. _DEFAULT_REGISTRY = LazyRegistry() #: Registry used for unpickling operations. _APP_REGISTRY = _DEFAULT_REGISTRY def _unpickle(cls, *args): """Rebuild object upon unpickling. All units must exist in the application registry.
from .UniformLineScanAndTopography import Topography, UniformLineScan # noqa: F401 # These imports are required to register the analysis functions! import SurfaceTopography.Converters # noqa: F401 import SurfaceTopography.Generic.Curvature # noqa: F401 import SurfaceTopography.Generic.ScanningProbe # noqa: F401 import SurfaceTopography.Generic.Slope # noqa: F401 import SurfaceTopography.Nonuniform.common # noqa: F401 import SurfaceTopography.Nonuniform.Autocorrelation # noqa: F401 import SurfaceTopography.Nonuniform.ScaleDependentStatistics # noqa: F401 import SurfaceTopography.Nonuniform.ScalarParameters # noqa: F401 import SurfaceTopography.Nonuniform.PowerSpectrum # noqa: F401 import SurfaceTopography.Nonuniform.VariableBandwidth # noqa: F401 import SurfaceTopography.Uniform.common # noqa: F401 import SurfaceTopography.Uniform.Interpolation # noqa: F401 import SurfaceTopography.Uniform.Filtering # noqa: F401 import SurfaceTopography.Uniform.Autocorrelation # noqa: F401 import SurfaceTopography.Uniform.PowerSpectrum # noqa: F401 import SurfaceTopography.Uniform.ScaleDependentStatistics # noqa: F401 import SurfaceTopography.Uniform.ScalarParameters # noqa: F401 import SurfaceTopography.Uniform.VariableBandwidth # noqa: F401 try: from importlib.metadata import version __version__ = version(__name__) except ImportError: from pkg_resources import get_distribution __version__ = get_distribution(__name__).version
#from ._nmf_batch_mu import NMFBatchMU #from ._nmf_batch_hals import NMFBatchHALS #from ._nmf_batch_nnls_bpp import NMFBatchNnlsBpp #from ._nmf_online_mu import NMFOnlineMU #from ._nmf_online_hals import NMFOnlineHALS #from ._nmf_online_nnls_bpp import NMFOnlineNnlsBpp from .nmf import run_nmf, integrative_nmf #from ._inmf_batch_mu import INMFBatchMU #from ._inmf_batch_hals import INMFBatchHALS #from ._inmf_batch_nnls_bpp import INMFBatchNnlsBpp #from ._inmf_online_mu import INMFOnlineMU #from ._inmf_online_hals import INMFOnlineHALS #from ._inmf_online_nnls_bpp import INMFOnlineNnlsBpp try: from importlib.metadata import version, PackageNotFoundError except ImportError: # < Python 3.8: Use backport module from importlib_metadata import version, PackageNotFoundError try: __version__ = version('nmf-torch') del version except PackageNotFoundError: pass
intOptions = parser.add_argument_group('static scene diagramming options') intOptions.add_argument('-d', '--delay', type=float, help='loop automatically with this delay (in seconds) ' 'instead of waiting for the user to close the diagram') intOptions.add_argument('-z', '--zoom', help='zoom expansion factor (default 1)', type=float, default=1) # Debugging options debugOpts = parser.add_argument_group('debugging options') debugOpts.add_argument('--show-params', help='show values of global parameters', action='store_true') debugOpts.add_argument('-b', '--full-backtrace', help='show full internal backtraces', action='store_true') debugOpts.add_argument('--pdb', action='store_true', help='enter interactive debugger on errors (implies "-b")') ver = metadata.version('scenic') debugOpts.add_argument('--version', action='version', version=f'Scenic {ver}', help='print Scenic version information and exit') debugOpts.add_argument('--dump-initial-python', help='dump initial translated Python', action='store_true') debugOpts.add_argument('--dump-ast', help='dump final AST', action='store_true') debugOpts.add_argument('--dump-python', help='dump Python equivalent of final AST', action='store_true') debugOpts.add_argument('--no-pruning', help='disable pruning', action='store_true') debugOpts.add_argument('--gather-stats', type=int, metavar='N', help='collect timing statistics over this many scenes') parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS, help=argparse.SUPPRESS) # Positional arguments
# Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] source_suffix = ".rst" # The master toctree document. master_doc = "index" # General information about the project. project = "pluggy" copyright = "2016, Holger Krekel" author = "Holger Krekel" release = metadata.version(project) # The short X.Y version. version = ".".join(release.split(".")[:2]) language = None pygments_style = "sphinx" # html_logo = "_static/img/plug.png" html_theme = "alabaster" html_theme_options = { "logo": "img/plug.png", "description": "The pytest plugin system", "github_user": "******", "github_repo": "pluggy", "github_button": "true", "github_banner": "true",
from importlib.metadata import version

import meshzoo
import numpy

packages = [("meshzoo", version("meshzoo"))]
colors = ("#8c564b", "#c49c94")  # cat20 brown


def disk(h):
    """Return a disk mesh whose target edge length is roughly ``h``."""
    # Shrink the target edge length a little so the mesh comes out tighter.
    target = h / 1.1
    return meshzoo.disk(6, int(1 / target))


def sphere(h):
    """Return an icosphere mesh whose target edge length is roughly ``h``."""
    # Edge length of a regular icosahedron inscribed in the unit sphere.
    edge_length = 1 / numpy.sin(0.4 * numpy.pi)
    subdivisions = int(edge_length / h)
    return meshzoo.icosa_sphere(subdivisions)


if __name__ == "__main__":
    import meshio

    points, cells = sphere(0.3)
    meshio.Mesh(points, {"triangle": cells}).write("out.vtk")
"""imagedata""" import logging # from imagedata.formats.abstractplugin import AbstractPlugin logging.getLogger(__name__).addHandler(logging.NullHandler()) try: from importlib.metadata import version, entry_points __version__ = version('imagedata') except ModuleNotFoundError: from importlib_metadata import version, entry_points __version__ = version('imagedata') except Exception: # import imagedata as _ from . import __path__ as _path from os.path import join # with open(join(_.__path__[0], "..", "VERSION.txt"), 'r') as fh: with open(join(_path[0], "..", "VERSION.txt"), 'r') as fh: __version__ = fh.readline().strip() __author__ = 'Erling Andersen, Haukeland University Hospital, Bergen, Norway' __email__ = '*****@*****.**' plugins = {} _plugins = entry_points()['imagedata_plugins'] for _plugin in _plugins: _class = _plugin.load() if _class.plugin_type not in plugins: plugins[_class.plugin_type] = [] if (_plugin.name, _class.name, _class) not in plugins[_class.plugin_type]:
from importlib.metadata import version from typing import Final from . import abc, checks, compat, formatting, re, util from .bot import Bot, DblBot from .cli import cli from .cog import Cog from .config import Config, ConfigException from .context import EmbedContext, PaginatedContext from .exceptions import NotGuildOwner, OnlyDirectMessage from .logging import setup_logging __title__: Final = 'botus_receptus' __author__: Final = 'Bryan Forbes' __license__: Final = 'BSD 3-clause' __version__: Final = version('botus_receptus') __all__ = ( 'abc', 'checks', 'compat', 'db', 'formatting', 're', 'util', 'setup_logging', 'cli', 'Cog', 'Config', 'ConfigException', 'Bot',