Example #1
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

try:
    from .gateway import RohonGateway
except ImportError:
    pass


import importlib_metadata


try:
    __version__ = importlib_metadata.version("vnpy_rohon")
except importlib_metadata.PackageNotFoundError:
    __version__ = "dev"
Example #2
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Placeholder docstring"""
from __future__ import absolute_import

import platform
import sys

import importlib_metadata

SDK_VERSION = importlib_metadata.version("sagemaker")
OS_NAME = platform.system() or "UnresolvedOS"
OS_VERSION = platform.release() or "UnresolvedOSVersion"
OS_NAME_VERSION = "{}/{}".format(OS_NAME, OS_VERSION)
PYTHON_VERSION = "Python/{}.{}.{}".format(sys.version_info.major,
                                          sys.version_info.minor,
                                          sys.version_info.micro)


def determine_prefix(user_agent=""):
    """Placeholder docstring"""
    prefix = "AWS-SageMaker-Python-SDK/{}".format(SDK_VERSION)

    if PYTHON_VERSION not in user_agent:
        prefix = "{} {}".format(prefix, PYTHON_VERSION)
Example #3
)
from .hf_api import HfFolder

logger = logging.getLogger(__name__)

_PY_VERSION: str = sys.version.split()[0]

if tuple(int(i) for i in _PY_VERSION.split(".")) < (3, 8, 0):
    import importlib_metadata
else:
    import importlib.metadata as importlib_metadata

_torch_version = "N/A"
_torch_available = False
try:
    _torch_version = importlib_metadata.version("torch")
    _torch_available = True
except importlib_metadata.PackageNotFoundError:
    pass

_tf_version = "N/A"
_tf_available = False
try:
    _tf_version = importlib_metadata.version("tensorflow")
    _tf_available = True
except importlib_metadata.PackageNotFoundError:
    pass


def is_torch_available():
    return _torch_available
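
Probing the distribution metadata, as above, detects torch and tensorflow without paying the cost of importing either package. A minimal sketch condensing the two try/except blocks into one helper (the name _probe is illustrative; Python 3.8+ assumed):

import importlib.metadata as importlib_metadata

def _probe(dist_name):
    """Return (version, available) without importing the package itself."""
    try:
        return importlib_metadata.version(dist_name), True
    except importlib_metadata.PackageNotFoundError:
        return "N/A", False

_torch_version, _torch_available = _probe("torch")
_tf_version, _tf_available = _probe("tensorflow")
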
Example #4
from test.utils import HERE, SIMPLE_PATH

import pytest
import yaml
from _pytest.main import ExitCode
from hypothesis import HealthCheck, Phase, Verbosity
from importlib_metadata import version
from requests import Response

from schemathesis import Case
from schemathesis.loaders import from_path
from schemathesis.models import Endpoint
from schemathesis.runner import DEFAULT_CHECKS

phases = "explicit, reuse, generate, target, shrink"
if version("hypothesis") < "4.5":
    phases = "explicit, reuse, generate, shrink"


def test_commands_help(cli):
    result = cli.main()

    assert result.exit_code == ExitCode.OK
    lines = result.stdout.split("\n")
    assert lines[11] == "  run  Perform schemathesis test."

    result_help = cli.main("--help")
    result_h = cli.main("-h")

    assert result.stdout == result_h.stdout == result_help.stdout
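
Note that version("hypothesis") < "4.5" above is a lexicographic string comparison, which misorders releases such as "4.40" vs "4.5". A sketch of a safer check, assuming the packaging library is available:

from importlib_metadata import version
from packaging.version import parse

# parse() yields Version objects that compare numerically, not textually
phases = "explicit, reuse, generate, target, shrink"
if parse(version("hypothesis")) < parse("4.5"):
    phases = "explicit, reuse, generate, shrink"
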
Example #5
# The master toctree document.
master_doc = "index"

# General information about the project.
project = "sundog"
copyright = "The sundog authors"
author = "The sundog authors"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
import importlib_metadata

version = importlib_metadata.version("sundog")
# The full version, including alpha/beta/rc tags.
release = version


def process_newsfragments():
    import pathlib
    import subprocess
    import sysconfig

    # TODO: needs released https://github.com/twisted/towncrier/commit/5c431028a3b699c74b162014e907272cbea8ac81
    bin = pathlib.Path(sysconfig.get_path("scripts"))
    subprocess.run(
        [bin / "towncrier", "build", "--yes", "--name", "sundog"],
        check=True,
        cwd="../..",
Example #6
 def test_case_insensitive(self):
     self.assertEqual(importlib_metadata.version('Example'), '21.12')
Example #7
else:
    from importlib.metadata import version

# SQLAlchemy 1.3.11+
try:
    from sqlalchemy import Computed
except ImportError:
    Computed = None  # type: ignore

# SQLAlchemy 1.4+
try:
    from sqlalchemy import Identity
except ImportError:
    Identity = None

_sqla_version = tuple(int(x) for x in version('sqlalchemy').split('.')[:2])
_re_boolean_check_constraint = re.compile(r"(?:.*?\.)?(.*?) IN \(0, 1\)")
_re_column_name = re.compile(r'(?:(["`]?).*\1\.)?(["`]?)(.*)\2')
_re_enum_check_constraint = re.compile(r"(?:.*?\.)?(.*?) IN \((.+)\)")
_re_enum_item = re.compile(r"'(.*?)(?<!\\)'")
_re_invalid_identifier = re.compile(r'(?u)\W')


class CodeGenerator(metaclass=ABCMeta):
    valid_options: ClassVar[Set[str]] = set()

    def __init__(self, metadata: MetaData, bind: Connectable, options: Set[str]):
        self.metadata = metadata
        self.bind = bind
        self.options = options
Example #8
def runner(
    uri,
    task=None,
    inputs=[],  # NB: inputs/empty use mutable [] defaults, shared across calls
    input_file=None,
    empty=[],
    json_only=False,
    runtime_defaults=None,
    runtime_memory_max=None,
    path=None,
    check_quant=True,
    **kwargs,
):
    # load WDL document
    doc = load(uri,
               path or [],
               check_quant=check_quant,
               read_source=read_source)

    # parse and validate the provided inputs
    target, input_env, input_json = runner_input(doc,
                                                 inputs,
                                                 input_file,
                                                 empty,
                                                 task=task)

    if json_only:
        print(json.dumps(input_json, indent=2))
        sys.exit(0)

    # set up logging
    level = NOTICE_LEVEL
    if kwargs["verbose"]:
        level = VERBOSE_LEVEL
    if kwargs["debug"]:
        level = logging.DEBUG
    if kwargs["no_color"]:
        # picked up by _util.install_coloredlogs()
        os.environ["NO_COLOR"] = os.environ.get("NO_COLOR", "")
    logging.basicConfig(level=level)
    logger = logging.getLogger("miniwdl-run")
    install_coloredlogs(logger)

    for pkg in ["miniwdl", "docker", "lark-parser", "argcomplete", "pygtail"]:
        try:
            logger.debug(importlib_metadata.version(pkg))
        except importlib_metadata.PackageNotFoundError:
            logger.debug(f"{pkg} UNKNOWN")
    logger.debug("dockerd: " + str(docker.from_env().version()))

    rerun_sh = f"pushd {shellquote(os.getcwd())} && miniwdl {' '.join(shellquote(t) for t in sys.argv[1:])}; popd"

    # configuration
    run_kwargs = dict((k, kwargs[k]) for k in [
        "copy_input_files", "run_dir", "runtime_cpu_max", "as_me", "max_tasks"
    ])
    if runtime_memory_max:
        run_kwargs["runtime_memory_max"] = parse_byte_size(runtime_memory_max)
    if runtime_defaults:
        if runtime_defaults.lstrip()[0] == "{":
            run_kwargs["runtime_defaults"] = json.loads(runtime_defaults)
        else:
            with open(runtime_defaults, "r") as infile:
                run_kwargs["runtime_defaults"] = json.load(infile)

    ensure_swarm(logger)

    # run & handle any errors
    try:
        rundir, output_env = runtime.run(target, input_env, **run_kwargs)
    except Exception as exn:
        outer_rundir = None
        inner_rundir = None
        while isinstance(exn, runtime.RunFailed):
            exn_rundir = getattr(exn, "run_dir")
            logger.error(_(str(exn), dir=exn_rundir))
            outer_rundir = outer_rundir or exn_rundir
            inner_rundir = exn_rundir
            exn = exn.__cause__
            assert exn
        if isinstance(exn,
                      runtime.task.CommandFailed) and not (kwargs["verbose"]
                                                           or kwargs["debug"]):
            logger.notice(_("standard error", file=getattr(exn,
                                                           "stderr_file")))
            logger.notice(
                "run with --verbose to include task standard error streams in this log"
            )
        if isinstance(getattr(exn, "pos", None), SourcePosition):
            pos = getattr(exn, "pos")
            logger.error("({} Ln {} Col {}) {}{}".format(
                pos.uri,
                pos.line,
                pos.column,
                exn.__class__.__name__,
                (", " + str(exn) if str(exn) else ""),
            ))
        else:
            logger.error(
                f"{exn.__class__.__name__}{(', ' + str(exn) if str(exn) else '')}"
            )
        if outer_rundir:
            with open(os.path.join(outer_rundir, "rerun"), "w") as rerunfile:
                print(rerun_sh, file=rerunfile)
        if kwargs["debug"]:
            raise
        sys.exit(2)

    # report
    with open(os.path.join(rundir, "rerun"), "w") as rerunfile:
        print(rerun_sh, file=rerunfile)
    outputs_json = {
        "outputs": values_to_json(output_env, namespace=target.name),
        "dir": rundir
    }
    print(json.dumps(outputs_json, indent=2))
    return outputs_json
Example #9
#
""" Meta information
"""

import importlib_metadata

PROJECT_NAME = 've'

VERSION = importlib_metadata.version(PROJECT_NAME)

# EOF
Example #10
 def test_zip_version_does_not_match(self):
     with self.assertRaises(PackageNotFoundError):
         version('definitely-not-installed')
Example #11
 def test_case_insensitive(self):
     self.assertEqual(version('Example'), '21.12')
Example #12
from __future__ import absolute_import
from __future__ import unicode_literals

import importlib_metadata  # TODO: importlib.metadata py38?

CONFIG_FILE = '.pre-commit-config.yaml'
MANIFEST_FILE = '.pre-commit-hooks.yaml'

YAML_DUMP_KWARGS = {
    'default_flow_style': False,
    # Use unicode
    'encoding': None,
    'indent': 4,
}

# Bump when installation changes in a backwards / forwards incompatible way
INSTALLED_STATE_VERSION = '1'
# Bump when modifying `empty_template`
LOCAL_REPO_VERSION = '1'

VERSION = importlib_metadata.version('pre_commit')

# `manual` is not invoked by any installed git hook.  See #719
STAGES = ('commit', 'prepare-commit-msg', 'commit-msg', 'manual', 'push')

DEFAULT = 'default'
Example #13
 def test_retrieves_version_of_self(self):
     version = importlib_metadata.version('importlib_metadata')
     assert isinstance(version, str)
     assert re.match(self.version_pattern, version)
Example #14
 def test_retrieves_version_of_pip(self):
     # Assume pip is installed and retrieve the version of pip.
     version = importlib_metadata.version('pip')
     assert isinstance(version, str)
     assert re.match(self.version_pattern, version)
Example #15
"""The rubintv service."""

__all__ = ["__version__"]

import sys

if sys.version_info < (3, 8):
    from importlib_metadata import PackageNotFoundError, version
else:
    from importlib.metadata import PackageNotFoundError, version

__version__: str
"""The application version string (PEP 440 / SemVer compatible)."""

try:
    __version__ = version(__name__)
except PackageNotFoundError:
    # package is not installed
    __version__ = "0.0.0"
Example #16
def main() -> None:
    parser = argparse.ArgumentParser(
        description='Generates SQLAlchemy model code from an existing database.'
    )
    parser.add_argument('url',
                        nargs='?',
                        help='SQLAlchemy url to the database')
    parser.add_argument('--version',
                        action='store_true',
                        help="print the version number and exit")
    parser.add_argument('--schema',
                        help='load tables from an alternate schema')
    parser.add_argument(
        '--tables', help='tables to process (comma-separated, default: all)')
    parser.add_argument('--noviews', action='store_true', help="ignore views")
    parser.add_argument('--noindexes',
                        action='store_true',
                        help='ignore indexes')
    parser.add_argument('--noconstraints',
                        action='store_true',
                        help='ignore constraints')
    parser.add_argument('--nojoined',
                        action='store_true',
                        help="don't autodetect joined table inheritance")
    parser.add_argument(
        '--noinflect',
        action='store_true',
        help="don't try to convert tables names to singular form")
    parser.add_argument('--noclasses',
                        action='store_true',
                        help="don't generate classes, only tables")
    parser.add_argument('--nocomments',
                        action='store_true',
                        help="don't render column comments")
    parser.add_argument('--outfile',
                        help='file to write output to (default: stdout)')
    args = parser.parse_args()

    if args.version:
        print(version('sqlacodegen'))
        return
    if not args.url:
        print('You must supply a url\n', file=sys.stderr)
        parser.print_help()
        return

    # Use reflection to fill in the metadata
    engine = create_engine(args.url)
    metadata = MetaData(engine)
    tables = args.tables.split(',') if args.tables else None
    metadata.reflect(engine, args.schema, not args.noviews, tables)

    # Write the generated model code to the specified file or standard output
    outfile = io.open(args.outfile, 'w',
                      encoding='utf-8') if args.outfile else sys.stdout
    generator = CodeGenerator(metadata,
                              args.noindexes,
                              args.noconstraints,
                              args.nojoined,
                              args.noinflect,
                              args.noclasses,
                              nocomments=args.nocomments)
    generator.render(outfile)
Example #17
import inspect
import json

import numpy as np
from importlib_metadata import version

import aiinpy as ai  # assumed: the package whose classes are inspected below

classes = np.array([], dtype=object)
source = np.array([])

title = np.array([])
model = np.array([])
discription = []
url = np.array([])
id = np.array([])
sourcecode = np.array([])

# discription, type
contentjson = json.load(open('src\\content.json'))
for i in range(len(contentjson)):
  if version('aiinpy') in contentjson[i].values(): 
    discription.append(contentjson[i]['discription'])

for name, obj in inspect.getmembers(ai):
  if inspect.isclass(obj):
    classes = np.append(classes, obj)

for i in range(len(classes)):
  # title, url, id, sourcecode
  title = np.append(title, 'aiinpy.' + classes[i].__name__)
  url = np.append(url, '/' + version('aiinpy') + '/' + classes[i].__name__)
  id = np.append(id, classes[i].__name__ )
  sourcecode = np.append(sourcecode, 'https://github.com/seanmabli/aiinpy/blob/' + version('aiinpy') + '/aiinpy/' + classes[i].__name__ + '.py')

  # model
  source = np.append(source, inspect.getsource(classes[i]))
Example #18
)
from sagemaker.amazon.factorization_machines import FactorizationMachinesPredictor  # noqa: F401
from sagemaker.amazon.ntm import NTM, NTMModel, NTMPredictor  # noqa: F401
from sagemaker.amazon.randomcutforest import (  # noqa: F401
    RandomCutForest, RandomCutForestModel, RandomCutForestPredictor,
)
from sagemaker.amazon.knn import KNN, KNNModel, KNNPredictor  # noqa: F401
from sagemaker.amazon.object2vec import Object2Vec, Object2VecModel  # noqa: F401
from sagemaker.amazon.ipinsights import (  # noqa: F401
    IPInsights, IPInsightsModel, IPInsightsPredictor,
)

from sagemaker.algorithm import AlgorithmEstimator  # noqa: F401
from sagemaker.analytics import TrainingJobAnalytics, HyperparameterTuningJobAnalytics  # noqa: F401
from sagemaker.local.local_session import LocalSession  # noqa: F401

from sagemaker.model import Model, ModelPackage  # noqa: F401
from sagemaker.pipeline import PipelineModel  # noqa: F401
from sagemaker.predictor import RealTimePredictor  # noqa: F401
from sagemaker.processing import Processor, ScriptProcessor  # noqa: F401
from sagemaker.session import Session  # noqa: F401
from sagemaker.session import container_def, pipeline_container_def  # noqa: F401
from sagemaker.session import production_variant  # noqa: F401
from sagemaker.session import s3_input  # noqa: F401
from sagemaker.session import get_execution_role  # noqa: F401

from sagemaker.automl.automl import AutoML, AutoMLJob, AutoMLInput  # noqa: F401
from sagemaker.automl.candidate_estimator import CandidateEstimator, CandidateStep  # noqa: F401

__version__ = importlib_metadata.version("sagemaker")
Example #19
def _version():
    pyversion = version("commodore")
    if f"v{pyversion}" != __git_version__:
        return f"{pyversion} (Git version: {__git_version__})"
    return pyversion
Example #20
from importlib_metadata import version, PackageNotFoundError

from flexmeasures.data.models.annotations import Annotation  # noqa F401
from flexmeasures.data.models.user import Account, AccountRole, User  # noqa F401
from flexmeasures.data.models.data_sources import DataSource as Source  # noqa F401
from flexmeasures.data.models.generic_assets import (  # noqa F401
    GenericAsset as Asset,
    GenericAssetType as AssetType,
)
from flexmeasures.data.models.time_series import Sensor  # noqa F401


__version__ = "Unknown"

# This uses importlib.metadata behaviour added in Python 3.8
# and relies on setuptools_scm.
try:
    __version__ = version("flexmeasures")
except PackageNotFoundError:
    # package is not installed
    pass
Example #21
"""Init of weatherov module."""

from .weather import Weather
from .plotting import plot

# from importlib.metadata import version  # only for python 3.8+
from importlib_metadata import version

__version__ = version('weatho')
__author__ = 'Olivier Vincent'
__license__ = '3-Clause BSD'
Example #22
# The MIT License (MIT)
#
# Copyright (c) 2015-present, Xiaoyou Chen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import importlib_metadata

from .gateway import CtpGateway

try:
    __version__ = importlib_metadata.version("vnpy_ctp")
except importlib_metadata.PackageNotFoundError:
    __version__ = "dev"
Example #23
# Used for compatibility
import hy  # noqa

from importlib_metadata import version
from os import makedirs

from .constants import STORES_DIR

makedirs(STORES_DIR, exist_ok=True)

__version__ = version('slot')
Example #24
def execute_pipeline_from_config(config: Dict,
                                 post_stage: Callable[[Dict],
                                                      None] = _null_function,
                                 **kwargs) -> Dict:
    original_config = deepcopy(config)

    check_required(config, ["global"])

    # !! pop = remove from config!
    global_parameters = config.pop('global')

    check_required(global_parameters, ["prefix", "sequences_file"])

    file_manager = get_file_manager(**global_parameters)

    # Make sure prefix exists
    prefix = global_parameters['prefix']

    # If prefix already exists
    if file_manager.exists(prefix):
        if not kwargs.get('overwrite'):
            raise FileExistsError(
                "The prefix already exists & no overwrite option has been set.\n"
                "Either set --overwrite, or move data from the prefix.\n"
                "Prefix: {}".format(prefix))
    else:
        # create the prefix
        file_manager.create_prefix(prefix)

    # Copy original config to prefix
    global_in = file_manager.create_file(prefix,
                                         None,
                                         _IN_CONFIG_NAME,
                                         extension='.yml')
    write_config_file(global_in, original_config)

    # This downloads sequences_file if required
    download_files_for_stage(global_parameters, file_manager, prefix)

    global_parameters = _process_fasta_file(**global_parameters)

    for stage_name in config:
        stage_parameters = config[stage_name]
        original_stage_parameters = dict(**stage_parameters)

        check_required(stage_parameters, ["protocol", "type"])

        stage_type = stage_parameters['type']
        stage_runnable = _STAGES.get(stage_type)

        if not stage_runnable:
            raise Exception(
                "No type defined, or invalid stage type defined: {}".format(
                    stage_type))

        # Prepare to run stage
        stage_parameters['stage_name'] = stage_name
        file_manager.create_stage(prefix, stage_name)

        stage_parameters = download_files_for_stage(stage_parameters,
                                                    file_manager, prefix,
                                                    stage_name)

        stage_dependency = stage_parameters.get('depends_on')
        stage_dependency_parameters = dict()

        if stage_dependency:
            if isinstance(stage_dependency, ordereddict):
                for stage_dependency_name in stage_dependency.keys():
                    if stage_dependency_name not in config:
                        raise Exception(
                            f"Stage {stage_name} depends on "
                            f"{stage_dependency_name}, but dependency not found in config."
                        )

                    previous_stage_parameters = config.get(
                        stage_dependency_name)
                    carryover_parameters = stage_dependency[
                        stage_dependency_name]

                    stage_dependency_parameters = {
                        **stage_dependency_parameters,
                        **{
                            parameter: previous_stage_parameters[parameter]
                            for parameter in carryover_parameters if parameter in previous_stage_parameters.keys(
                            )
                        }
                    }
            elif isinstance(stage_dependency, list):
                for stage_dependency_name in stage_dependency:
                    if stage_dependency_name not in config:
                        raise Exception(
                            f"Stage {stage_name} depends on "
                            f"{stage_dependency_name}, but dependency not found in config."
                        )
                    stage_dependency_parameters = {
                        **stage_dependency_parameters,
                        **config.get(stage_dependency_name)
                    }
            elif isinstance(stage_dependency, str):
                if stage_dependency not in config:
                    raise Exception(
                        f"Stage {stage_name} depends on "
                        f"{stage_dependency}, but dependency not found in config."
                    )

                stage_dependency_parameters = config.get(stage_dependency)
            else:
                raise Exception(
                    f"Stage {stage_name} depends on unknown strategy: {stage_dependency}."
                )

            stage_parameters = {
                **global_parameters,
                **stage_dependency_parameters,
                **stage_parameters
            }
        else:
            stage_parameters = {**global_parameters, **stage_parameters}

        # Register start time
        start_time = datetime.now().astimezone()
        stage_parameters['start_time'] = str(start_time)

        stage_in = file_manager.create_file(prefix,
                                            stage_name,
                                            _IN_CONFIG_NAME,
                                            extension='.yml')
        write_config_file(stage_in, stage_parameters)

        try:
            stage_output_parameters = stage_runnable(**stage_parameters)
        except Exception as e:
            # We are fairly sure this is a misconfiguration rather than a bug
            if isinstance(e, InvalidParameterError):
                raise

            # Tell the user which stage failed and show a url to report an error on github
            try:
                version = importlib_metadata.version("bio_embeddings")
            except PackageNotFoundError:
                version = "unknown"

            # Make a github flavored markdown table; the header is in the template
            parameter_table = "\n".join(
                f"{key}|{value}"
                for key, value in original_stage_parameters.items())
            params = {
                # https://stackoverflow.com/a/35498685/3549270
                "title":
                f"Protocol {original_stage_parameters['protocol']}: {type(e).__name__}: {e}",
                "body":
                _ERROR_REPORTING_TEMPLATE.format(
                    version,
                    torch.cuda.is_available(),
                    parameter_table,
                    traceback.format_exc(10),
                ),
            }
            print(traceback.format_exc(), file=sys.stderr)
            print(
                f"Consider reporting this error at this url: {_ISSUE_URL}?{urllib.parse.urlencode(params)}\n\n"
                f"Stage {stage_name} failed.",
                file=sys.stderr,
            )

            sys.exit(1)

        # Register end time
        end_time = datetime.now().astimezone()
        stage_output_parameters['end_time'] = str(end_time)

        # Register elapsed time
        stage_output_parameters['elapsed_time'] = str(end_time - start_time)

        stage_out = file_manager.create_file(prefix,
                                             stage_name,
                                             _OUT_CONFIG_NAME,
                                             extension='.yml')
        write_config_file(stage_out, stage_output_parameters)

        # Store in global_out config for later retrieval (e.g. depends_on)
        config[stage_name] = stage_output_parameters

        # Execute post-stage function, if provided
        post_stage(stage_output_parameters)

    config['global'] = global_parameters

    try:
        config['global']['version'] = importlib_metadata.version(
            "bio_embeddings")
    except PackageNotFoundError:
        pass  # :(

    global_out = file_manager.create_file(prefix,
                                          None,
                                          _OUT_CONFIG_NAME,
                                          extension='.yml')
    write_config_file(global_out, config)

    return config
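
The error handler above prefills a GitHub issue by URL-encoding a title and body as query parameters to the /issues/new endpoint. A standalone illustration (the repository URL and values are made up):

import urllib.parse

params = {
    "title": "Protocol seqvec: RuntimeError: example failure",  # illustrative values
    "body": "version|cuda\n---|---\n0.1.0|False",
}
print("https://github.com/example/repo/issues/new?" + urllib.parse.urlencode(params))
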
Example #25
HUB_DATASETS_URL = HF_ENDPOINT + "/datasets/{path}/resolve/{revision}/{name}"
HUB_DEFAULT_VERSION = "main"

PY_VERSION = version.parse(platform.python_version())

if PY_VERSION < version.parse("3.8"):
    import importlib_metadata
else:
    import importlib.metadata as importlib_metadata

# General environment variables accepted values for booleans
ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"}
ENV_VARS_TRUE_AND_AUTO_VALUES = ENV_VARS_TRUE_VALUES.union({"AUTO"})

# Imports
PYARROW_VERSION = version.parse(importlib_metadata.version("pyarrow"))

USE_TF = os.environ.get("USE_TF", "AUTO").upper()
USE_TORCH = os.environ.get("USE_TORCH", "AUTO").upper()
USE_JAX = os.environ.get("USE_JAX", "AUTO").upper()

TORCH_VERSION = "N/A"
TORCH_AVAILABLE = False

if USE_TORCH in ENV_VARS_TRUE_AND_AUTO_VALUES and USE_TF not in ENV_VARS_TRUE_VALUES:
    TORCH_AVAILABLE = importlib.util.find_spec("torch") is not None
    if TORCH_AVAILABLE:
        try:
            TORCH_VERSION = version.parse(importlib_metadata.version("torch"))
            logger.info(f"PyTorch version {TORCH_VERSION} available.")
        except importlib_metadata.PackageNotFoundError:
Example #26
 def test_white_list_contents_with_cors_header_version(self, *args):  # pylint: disable=unused-argument
     """ Verify that with django-cor-header<3 it loads list without scheme. """
     assert settings.CORS_ORIGIN_WHITELIST == ['sandbox.edx.org']
     assert int(version('django_cors_headers').split('.')[0]) == 2
Example #27
    DreameVacuum,
    G1Vacuum,
    RoborockVacuum,
    RoidmiVacuumMiot,
    VacuumException,
    ViomiVacuum,
)
from miio.integrations.vacuum.roborock.vacuumcontainers import (
    CleaningDetails,
    CleaningSummary,
    ConsumableStatus,
    DNDStatus,
    Timer,
    VacuumStatus,
)
from miio.powerstrip import PowerStrip
from miio.protocol import Message, Utils
from miio.pwzn_relay import PwznRelay
from miio.scishare_coffeemaker import ScishareCoffee
from miio.toiletlid import Toiletlid
from miio.walkingpad import Walkingpad
from miio.waterpurifier import WaterPurifier
from miio.waterpurifier_yunmi import WaterPurifierYunmi
from miio.wifirepeater import WifiRepeater
from miio.wifispeaker import WifiSpeaker
from miio.yeelight_dual_switch import YeelightDualControlModule

from miio.discovery import Discovery

__version__ = version("python-miio")
Example #28
import regex as re
from heavydb._parsers import _extract_column_details
from heavydb.dtypes import TDatumType as heavydb_dtype
from ibis.backends.base import Database
from ibis.backends.base.sql import BaseSQLBackend
from ibis.backends.base.sql.compiler import DDL, DML
from importlib_metadata import PackageNotFoundError, version

from . import ddl
from . import dtypes as heavydb_dtypes
from .client import HeavyDBDataType, HeavyDBTable, get_cursor_class
from .compiler import HeavyDBCompiler
from .udf import HeavyDBUDF

try:
    __version__ = version("ibis_heavyai")
except PackageNotFoundError:
    __version__ = ""

try:
    from cudf import DataFrame as GPUDataFrame  # noqa
except (ImportError, OSError):
    GPUDataFrame = None

__all__ = ('Backend', "__version__")


class Backend(BaseSQLBackend):
    """When the backend is loaded, this class becomes `ibis.heavyai`."""

    name = 'heavyai'
Example #29
# -*- coding: utf-8 -*-
import cellrank.pl
import cellrank.tl
import cellrank.ul
import cellrank.logging
import cellrank.datasets
from cellrank.settings import settings as settings
from cellrank.tl._read import read
from cellrank.tl._constants import Lin

__author__ = ", ".join(["Marius Lange", "Michal Klein"])
__maintainer__ = ", ".join(["Marius Lange", "Michal Klein"])
__version__ = "1.1.0"
__email__ = "*****@*****.**"

try:
    from importlib_metadata import version  # Python < 3.8
except ImportError:
    from importlib.metadata import version  # Python >= 3.8

from packaging.version import parse

__full_version__ = parse(version(__name__))
__full_version__ = (
    f"{__version__}+{__full_version__.local}" if __full_version__.local else __version__
)

del version, parse
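
For a setuptools_scm-managed checkout, the metadata version can carry a PEP 440 local segment (e.g. a git hash), which packaging exposes via .local; that is what the __full_version__ composition above relies on. A small illustration with a made-up version string:

from packaging.version import parse

v = parse("1.1.0+gabc123")  # illustrative setuptools_scm-style version
print(v.public)  # 1.1.0
print(v.local)   # gabc123
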
Example #30
def main() -> None:
    generators = {
        ep.name: ep
        for ep in entry_points(group="sqlacodegen.generators")
    }
    parser = argparse.ArgumentParser(
        description="Generates SQLAlchemy model code from an existing database."
    )
    parser.add_argument("url",
                        nargs="?",
                        help="SQLAlchemy url to the database")
    parser.add_argument("--option",
                        nargs="*",
                        help="options passed to the generator class")
    parser.add_argument("--version",
                        action="store_true",
                        help="print the version number and exit")
    parser.add_argument(
        "--schemas",
        help="load tables from the given schemas (comma separated)")
    parser.add_argument(
        "--generator",
        choices=generators,
        default="declarative",
        help="generator class to use",
    )
    parser.add_argument(
        "--tables", help="tables to process (comma-separated, default: all)")
    parser.add_argument("--noviews", action="store_true", help="ignore views")
    parser.add_argument("--outfile",
                        help="file to write output to (default: stdout)")
    args = parser.parse_args()

    if args.version:
        print(version("sqlacodegen"))
        return
    if not args.url:
        print("You must supply a url\n", file=sys.stderr)
        parser.print_help()
        return

    # Use reflection to fill in the metadata
    engine = create_engine(args.url)
    metadata = MetaData()
    tables = args.tables.split(",") if args.tables else None
    schemas = args.schemas.split(",") if args.schemas else [None]
    for schema in schemas:
        metadata.reflect(engine, schema, not args.noviews, tables)

    # Instantiate the generator
    generator_class = generators[args.generator].load()
    generator = generator_class(metadata, engine, set(args.option or ()))

    # Open the target file (if given)
    with ExitStack() as stack:
        outfile: TextIO
        if args.outfile:
            outfile = open(args.outfile, "w", encoding="utf-8")
            stack.enter_context(outfile)
        else:
            outfile = sys.stdout

        # Write the generated model code to the specified file or standard output
        outfile.write(generator.generate())
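
The entry_points(group=...) keyword used above is the "selectable" entry-points API, available in importlib.metadata from Python 3.10 and in the importlib_metadata backport since 3.6. A hedged compatibility import for older interpreters:

import sys

if sys.version_info >= (3, 10):
    from importlib.metadata import entry_points
else:
    from importlib_metadata import entry_points  # backport supports group=

generators = {ep.name: ep for ep in entry_points(group="sqlacodegen.generators")}
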
Example #31
ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"}
ENV_VARS_TRUE_AND_AUTO_VALUES = ENV_VARS_TRUE_VALUES.union({"AUTO"})


# Imports
USE_TF = os.environ.get("USE_TF", "AUTO").upper()
USE_TORCH = os.environ.get("USE_TORCH", "AUTO").upper()

TORCH_VERSION = "N/A"
TORCH_AVAILABLE = False

if USE_TORCH in ENV_VARS_TRUE_AND_AUTO_VALUES and USE_TF not in ENV_VARS_TRUE_VALUES:
    TORCH_AVAILABLE = importlib.util.find_spec("torch") is not None
    if TORCH_AVAILABLE:
        try:
            TORCH_VERSION = importlib_metadata.version("torch")
            logger.info(f"PyTorch version {TORCH_VERSION} available.")
        except importlib_metadata.PackageNotFoundError:
            pass
else:
    logger.info("Disabling PyTorch because USE_TF is set")

TF_VERSION = "N/A"
TF_AVAILABLE = False

if USE_TF in ENV_VARS_TRUE_AND_AUTO_VALUES and USE_TORCH not in ENV_VARS_TRUE_VALUES:
    TF_AVAILABLE = importlib.util.find_spec("tensorflow") is not None
    if TF_AVAILABLE:
        # For the metadata, we have to look for both tensorflow and tensorflow-cpu
        for package in [
            "tensorflow",
Example #32
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import importlib_metadata


__version__ = importlib_metadata.version("mongo_connector")


# Maximum # of documents to process before recording timestamp
# default = -1 (no maximum)
DEFAULT_BATCH_SIZE = -1

# Interval in seconds between doc manager flushes (i.e. auto commit)
# default = None (never auto commit)
DEFAULT_COMMIT_INTERVAL = None

# Maximum # of documents to send in a single bulk request through a
# DocManager.
DEFAULT_MAX_BULK = 1000

# The default MongoDB field that will serve as the unique key for the
Example #33
 def test_retrieves_version_of_distinfo_pkg(self):
     version = importlib_metadata.version('distinfo-pkg')
     assert isinstance(version, text)  # 'text' is a py2/py3 compatibility alias for str used by these tests
     assert re.match(self.version_pattern, version)
Example #34
else:
    import importlib.metadata as importlib_metadata

__all__ = ['is_tf_available', 'is_torch_available']

ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"}
ENV_VARS_TRUE_AND_AUTO_VALUES = ENV_VARS_TRUE_VALUES.union({"AUTO"})

USE_TF = os.environ.get("USE_TF", "AUTO").upper()
USE_TORCH = os.environ.get("USE_TORCH", "AUTO").upper()

if USE_TORCH in ENV_VARS_TRUE_AND_AUTO_VALUES and USE_TF not in ENV_VARS_TRUE_VALUES:
    _torch_available = importlib.util.find_spec("torch") is not None
    if _torch_available:
        try:
            _torch_version = importlib_metadata.version("torch")
            logging.info(f"PyTorch version {_torch_version} available.")
        except importlib_metadata.PackageNotFoundError:
            _torch_available = False
else:
    logging.info("Disabling PyTorch because USE_TF is set")
    _torch_available = False

if USE_TF in ENV_VARS_TRUE_AND_AUTO_VALUES and USE_TORCH not in ENV_VARS_TRUE_VALUES:
    _tf_available = importlib.util.find_spec("tensorflow") is not None
    if _tf_available:
        candidates = (
            "tensorflow",
            "tensorflow-cpu",
            "tensorflow-gpu",
            "tf-nightly",
Example #35
        BulkIndexError: errors.OperationFailed,
        es_exceptions.ConnectionError: errors.ConnectionFailed,
        es_exceptions.TransportError: errors.OperationFailed,
        es_exceptions.NotFoundError: errors.OperationFailed,
        es_exceptions.RequestError: errors.OperationFailed,
    }
)

LOG = logging.getLogger(__name__)

DEFAULT_SEND_INTERVAL = 5
"""The default interval in seconds to send buffered operations."""

DEFAULT_AWS_REGION = "us-east-1"

__version__ = importlib_metadata.version("elastic2_doc_manager")


def convert_aws_args(aws_args):
    """Convert old style options into arguments to boto3.session.Session."""
    if not isinstance(aws_args, dict):
        raise errors.InvalidConfiguration(
            'Elastic DocManager config option "aws" must be a dict'
        )
    old_session_kwargs = dict(
        region="region_name",
        access_id="aws_access_key_id",
        secret_key="aws_secret_access_key",
    )
    new_kwargs = {}
    for arg in aws_args:
Example #36
 def test_zip_version(self):
     self.assertEqual(importlib_metadata.version('example'), '21.12')