Example #1
def data(version=None):
    file_type = request.args.get('file_type', 'csv')
    price = request.args.get('price', 0.05)
    headers = {
        'timestamp': 'Timestamp',
        'date': 'Date and Time',
        'max_consumption': 'MAX',
        'min_consumption': 'MIN',
        'guess_consumption': 'GUESS'
    }

    if version_parse(version) >= version_parse('v1.1.1'):
        headers = {
            'timestamp': 'Timestamp',
            'date': 'Date and Time',
            'max_power': 'power MAX, GW',
            'min_power': 'power MIN, GW',
            'guess_power': 'power GUESS, GW',
            'max_consumption': 'annualised consumption MAX, TWh',
            'min_consumption': 'annualised consumption MIN, TWh',
            'guess_consumption': 'annualised consumption GUESS, TWh'
        }

    rows = get_data(version, float(price))
    send_file_func = send_file(
        first_line=f'Average electricity cost assumption: {price} USD/kWh',
        file_type=file_type)

    return send_file_func(headers, rows)
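A side note on why the comparison in the handler above works: packaging's parser tolerates a leading "v" and compares release segments numerically. A minimal sketch, assuming only that packaging is installed:

from packaging.version import parse as version_parse

# 'v1.1.1' normalizes to '1.1.1', so URL-style version strings compare cleanly
assert version_parse('v1.1.1') == version_parse('1.1.1')
assert version_parse('v1.2') > version_parse('v1.1.1')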
Example #2
    def latest_version_available(self, package_name):
        """
        checking for the latest available version of a package on Pypi
        :param package_name:
        :type package_name:
        :return:
        :rtype:
        """

        current_python_version = version_parse(
            f"{sys.version_info.major}.{sys.version_info.minor}"
            f".{sys.version_info.micro}")

        current_version = version_parse(ifat_version_installed)
        current_is_prerelease = (str(current_version)
                                 == str(ifat_version_installed)
                                 and current_version.is_prerelease)

        result = requests.get(f"https://pypi.org/pypi/{package_name}/json",
                              timeout=(5, 30))

        latest = None

        if result.status_code == requests.codes.ok:
            pypi_info = result.json()

            for release, release_details in pypi_info["releases"].items():
                release_detail = (release_details[-1]
                                  if len(release_details) > 0 else None)
                if not release_detail or (
                        not release_detail["yanked"] and
                    ("requires_python" not in release_detail
                     or not release_detail["requires_python"]
                     or current_python_version in SpecifierSet(
                         release_detail["requires_python"]))):
                    my_release = version_parse(release)

                    if str(my_release) == str(release) and (
                            current_is_prerelease
                            or not my_release.is_prerelease):
                        latest = release

            if not latest:
                self.stdout.write(
                    self.style.WARNING(
                        "Could not find a suitable release of '{package_name}'"
                        .format(package_name=package_name)))

            return latest

        self.stdout.write(
            self.style.WARNING(
                "Package '{package_name}' is not registered in PyPI".format(
                    package_name=package_name)))

        return None
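The requires_python filter above relies on packaging.specifiers.SpecifierSet, where the in operator tests whether a version satisfies a specifier string. A minimal sketch of that check in isolation:

from packaging.specifiers import SpecifierSet
from packaging.version import parse as version_parse

# `in` delegates to SpecifierSet.contains(), mirroring the requires_python test above
spec = SpecifierSet(">=3.7,<4")
assert version_parse("3.9.1") in spec
assert version_parse("2.7.18") not in spec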
Example #3
def check_docker_compose():
    import re
    from subprocess import check_output
    from packaging.version import parse as version_parse
    pattern = '(docker-compose version) ([0-9.]+(-rc[0-9])?)(, build [a-z0-9]+)'
    output = check_output('docker-compose --version', shell=True).decode('utf-8').strip()
    regex = re.compile(pattern)
    match = regex.search(output)
    version = match.groups()[1]
    assert version_parse(version) >= version_parse(MINIMUM_DOCKER_COMPOSE_VERSION)
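The -rc suffix captured by the regex is valid input for version_parse, which normalizes it to a PEP 440 release candidate ordered before the final release. A small check:

from packaging.version import parse as version_parse

# release candidates are prereleases and sort below the final version
assert version_parse('1.25.0-rc1') < version_parse('1.25.0')
assert version_parse('1.25.0-rc1').is_prerelease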
Example #4
def __scale_axes(axes, ax_type, which):
    """Set the axis scaling"""

    kwargs = dict()
    if which == 'x':
        if version_parse(matplotlib.__version__) < version_parse('3.3.0'):
            thresh = 'linthreshx'
            base = 'basex'
            scale = 'linscalex'
        else:
            thresh = 'linthresh'
            base = 'base'
            scale = 'linscale'

        scaler = axes.set_xscale
        limit = axes.set_xlim
    else:
        if version_parse(matplotlib.__version__) < version_parse('3.3.0'):
            thresh = 'linthreshy'
            base = 'basey'
            scale = 'linscaley'
        else:
            thresh = 'linthresh'
            base = 'base'
            scale = 'linscale'

        scaler = axes.set_yscale
        limit = axes.set_ylim

    # Map ticker scales
    if ax_type == 'mel':
        mode = 'symlog'
        kwargs[thresh] = 1000.0
        kwargs[base] = 2

    elif ax_type in ['cqt', 'cqt_hz', 'cqt_note', 'cqt_svara']:
        mode = 'log'
        kwargs[base] = 2

    elif ax_type in ['log', 'fft_note', 'fft_svara']:
        mode = 'symlog'
        kwargs[base] = 2
        # kwargs[thresh] = core.note_to_hz(
        #    'C2'
        # )  # in librosa/core.py but I don't think it is needed
        kwargs[scale] = 0.5

    elif ax_type in ['tempo', 'fourier_tempo']:
        mode = 'log'
        kwargs[base] = 2
        limit(16, 480)
    else:
        return

    scaler(mode, **kwargs)
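The version gate exists because matplotlib 3.3 renamed the axis-suffixed symlog keywords (basex/linthreshx/linscalex and the y variants) to unsuffixed names. A minimal sketch of the same gate applied directly, assuming matplotlib is installed:

import matplotlib
import matplotlib.pyplot as plt
from packaging.version import parse as version_parse

fig, ax = plt.subplots()
if version_parse(matplotlib.__version__) < version_parse('3.3.0'):
    ax.set_xscale('symlog', linthreshx=1000.0, basex=2)  # old suffixed kwargs
else:
    ax.set_xscale('symlog', linthresh=1000.0, base=2)    # renamed in 3.3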
Example #5
def __scale_axes(axes, ax_type, which):
    """Set the axis scaling"""

    kwargs = dict()
    if which == "x":
        if version_parse(matplotlib.__version__) < version_parse("3.3.0"):
            thresh = "linthreshx"
            base = "basex"
            scale = "linscalex"
        else:
            thresh = "linthresh"
            base = "base"
            scale = "linscale"

        scaler = axes.set_xscale
        limit = axes.set_xlim
    else:
        if version_parse(matplotlib.__version__) < version_parse("3.3.0"):
            thresh = "linthreshy"
            base = "basey"
            scale = "linscaley"
        else:
            thresh = "linthresh"
            base = "base"
            scale = "linscale"

        scaler = axes.set_yscale
        limit = axes.set_ylim

    # Map ticker scales
    if ax_type == "mel":
        mode = "symlog"
        kwargs[thresh] = 1000.0
        kwargs[base] = 2

    elif ax_type in ["cqt", "cqt_hz", "cqt_note", "cqt_svara"]:
        mode = "log"
        kwargs[base] = 2

    elif ax_type in ["log", "fft_note", "fft_svara"]:
        mode = "symlog"
        kwargs[base] = 2
        kwargs[thresh] = core.note_to_hz("C2")
        kwargs[scale] = 0.5

    elif ax_type in ["tempo", "fourier_tempo"]:
        mode = "log"
        kwargs[base] = 2
        limit(16, 480)
    else:
        return

    scaler(mode, **kwargs)
Example #6
def version_check(ns):
    if ns.version not in VERSIONS:
        version = api_version(ns)
        version = version.split('-')[0]
        if version_parse(version) >= version_parse(cli_version()):
            logging.debug('version_check: PASSED')
            return
        raise VersionCheckFailedError(version, cli_version())
    if ns.version == VERSIONS[0]:
        output({'cli-version': cli_version()})
    elif ns.version == VERSIONS[1]:
        output({'api-version': api_version(ns)})
    sys.exit(0)
Example #7
def get_version(url=TOPALIAS_PYPI_LATEST_VERSION):
    """Return version of topalias package on pypi.org."""
    req = requests.get(url)
    version = version_parse("0")
    if req.status_code == requests.codes.ok:  # pylint: disable=E1101
        req.encoding = req.apparent_encoding
        j = json.loads(req.text.encode(req.encoding))
        releases = j.get("releases", [])
        for release in releases:
            ver = version_parse(release)
            if not ver.is_prerelease:
                version = max(version, ver)
    return version
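max() works on the parsed values because Version objects are totally ordered by PEP 440 rules rather than by string comparison:

from packaging.version import parse as version_parse

versions = [version_parse(v) for v in ('1.2', '1.10', '1.9')]
# release segments compare numerically: 1.10 > 1.9, unlike a string sort
assert max(versions) == version_parse('1.10')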
Example #8
def create_version_dict(os: str) -> Dict[str, str]:
    tarball_urls = get_tarball_urls()
    result = dict()

    for tarball_url in tarball_urls:
        version = re.findall(r'cmake-(([0-9.]+)(-rc[0-9]+)?)', tarball_url)[0][0]

        if (os == 'macos' and ('Darwin64' in tarball_url or 'Darwin-x86_64' in tarball_url)) \
                or (os == 'linux' and 'Linux-x86_64' in tarball_url):
            if version_parse(version).public not in result:
                result[version_parse(version).public] = tarball_url

    return result
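The .public property used as the dict key yields the normalized PEP 440 form (and would strip any +local segment), so the -rc spelling from the tarball name is canonicalized:

from packaging.version import parse as version_parse

# '-rc1' normalizes to 'rc1' in the public string
assert version_parse('3.20.0-rc1').public == '3.20.0rc1'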
Example #9
def deploy_model(args, version=__version__):
    parsed_version = version_parse(version)
    assert parsed_version.major == 4

    if parsed_version.minor == 0:
        deploy_model_4_0(args, version)
    elif parsed_version.minor == 1:
        deploy_model_4_1(args, version)
Example #10
    def test_different_dtypes(self):
        shape = (5, 5)
        num_elements = shape[0] * shape[1]

        for dtype in [np.float32, np.float64]:
            elements = np.random.random_sample(num_elements)
            elements = elements.astype(dtype, casting="same_kind")
            array = np.reshape(elements, newshape=shape)
            self.lossless_round_trip(array)

        if (version_parse is not None and
            (version_parse(np.__version__) >= version_parse("1.11.0"))
        ):
            for dtype in [np.int32, np.int64]:
                array = np.random.randint(2**30, size=shape, dtype=dtype)
                self.lossless_round_trip(array)
        else:
            array = np.random.randint(2**30, size=shape)
            self.lossless_round_trip(array)
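The gate above encodes the fact that np.random.randint only accepts a dtype keyword from numpy 1.11.0 onwards; older versions take the fallback branch. A condensed sketch of the same check:

import numpy as np
from packaging.version import parse as version_parse

if version_parse(np.__version__) >= version_parse('1.11.0'):
    array = np.random.randint(2**30, size=(5, 5), dtype=np.int64)  # dtype supported
else:
    array = np.random.randint(2**30, size=(5, 5))  # platform default integer type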
Example #11
    def docker_api(self) -> CheckResult:
        name = 'docker-api'
        if self._check_docker_command() is None:
            return self._failed(name=name, info='No such command: "docker"')

        version_info = self._get_docker_version_info()
        if not version_info:
            return self._failed(
                name=name,
                info='Docker api request failed. Is docker installed?')
        logger.debug('Docker version info %s', version_info)
        actual_version = version_info['ApiVersion']
        expected_version = self.requirements['docker-api']
        info = {
            'expected_version': expected_version,
            'actual_version': actual_version
        }
        if version_parse(actual_version) < version_parse(expected_version):
            return self._failed(name=name, info=info)
        else:
            return self._ok(name=name, info=info)
Example #12
def check_and_migrate(filestore, auto_migrate=True):
    filestore_version = filestore.version if hasattr(filestore,
                                                     'version') else "0.0"
    if filestore_version != ots_version:
        if not auto_migrate:
            run_migration = click.confirm(
                f"Current filestore version is \"{filestore_version}\", "
                f"but your current version of OTS is \"{ots_version}\". "
                "Do you want to migrate the database?",
                default=False,
            )

            if not run_migration:
                raise click.ClickException(
                    "Migration aborted. To continue using ots, you need to either run "
                    f"the migration, or downgrade ots back to version \"{filestore_version}\"."
                )
            click.echo("Running required migrations...")
        else:
            click.echo(f"Filestore version not up-to-date with ots "
                       f"({filestore_version} < {ots_version}). "
                       f"Migrating database to current version...")

        filestore_v = version_parse(filestore_version)
        for mig_version, mig_func in get_migration_functions():
            if filestore_v < version_parse(mig_version):
                click.echo(
                    f"Running filestore migration to version \"{mig_version}\""
                )
                mig_func(filestore)

        # Update the filestore's `version` to be the current one.
        # Do it here instead of in the migration function itself to avoid
        # having to always create a migration function for a new version
        # if it wouldn't otherwise require one.
        click.secho(f"Filestore migrated to version {ots_version}",
                    fg='green',
                    bold=True)
        filestore.version = ots_version
Example #13
def get_cmake_binaries(tools_dir: str) -> List[CMakeBinary]:
    binaries = []  # type: List[CMakeBinary]

    for filename in glob.glob(tools_dir + '/**/bin/cmake', recursive=True):
        try:
            version = re.findall(r'cmake-([^-]+)-', filename)[0]
            binaries.append(CMakeBinary(version, os.path.abspath(filename)))
        except IndexError:
            pass

    print('Found {count} CMake binaries from directory {tools_dir}\n'.format(
        count=len(binaries), tools_dir=tools_dir))
    return sorted(binaries, key=lambda x: version_parse(x.version))
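Using version_parse as the sort key is what makes the ordering semantic; a plain string sort would place '3.9' after '3.21':

from packaging.version import parse as version_parse

tags = ['3.10.2', '3.9.6', '3.21.1']
assert sorted(tags) == ['3.10.2', '3.21.1', '3.9.6']  # lexicographic
assert sorted(tags, key=version_parse) == ['3.9.6', '3.10.2', '3.21.1']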
Example #14
def __parse_cve(cve_data: tp.Dict[str, tp.Any]) -> CVE:
    vulnerable_configurations = cve_data.get('vulnerable_configuration', [])
    if vulnerable_configurations and isinstance(vulnerable_configurations[0],
                                                str):
        vulnerable_versions = frozenset([
            version_parse(x.replace(':*', '').split(':')[-1])
            for x in vulnerable_configurations
        ])
    else:
        vulnerable_versions = frozenset([
            version_parse(x['title'].replace(':*', '').split(':')[-1])
            for x in vulnerable_configurations
        ])

    return CVE(cve_id=cve_data.get('id', None),
               score=cve_data.get('cvss', None),
               published=datetime.strptime(cve_data.get('Published', None),
                                           '%Y-%m-%dT%H:%M:%S'),
               vector=frozenset(cve_data.get('cvss-vector', '').split('/')),
               references=cve_data.get('references', None),
               summary=cve_data.get('summary', None),
               vulnerable_versions=vulnerable_versions)
Example #15
    def docker_compose(self) -> CheckResult:
        name = 'docker-compose'
        cmd = shutil.which('docker-compose')
        if cmd is None:
            info = 'No such command: "docker-compose"'
            return self._failed(name=name, info=info)

        v_cmd_result = run_cmd(['docker-compose', '-v'], check_code=False)
        output = v_cmd_result.stdout.decode('utf-8').rstrip()
        if v_cmd_result.returncode != 0:
            info = f'Checking docker-compose version failed with: {output}'
            return self._failed(name=name, info=info)

        actual_version = output.split(',')[0].split()[-1].strip()
        expected_version = self.requirements['docker-compose']

        info = f'Expected docker-compose version {expected_version}, actual {actual_version}'  # noqa
        if version_parse(actual_version) < version_parse(expected_version):
            return self._failed(name=name, info=info)
        else:
            return self._ok(name=name, info=info)
Example #16
    def add_package(self):
        assert self._attrs is not None
        assert self._data is not None

        base, ext = self.splitext(self._data)

        if ext not in SDIST_EXTS:
            return

        attrs = dict(self._attrs)

        version_str = self.get_version(base)
        if version_str is None:
            print(f"Unable to parse version from {base}, ignoring ...")
            return

        version = version_parse(version_str)
        url = self.get_url(attrs['href'])
        hash_type, hash = self.get_hash(attrs['href'])

        self.packages[version] = Package(self.base_name, version, Types.SDIST,
                                         url, hash_type, hash)
Example #17
def test_jax_shape_ops():
    x_np = np.zeros((20, 3))
    x = Shape()(aet.as_tensor_variable(x_np))
    x_fg = FunctionGraph([], [x])

    compare_jax_and_py(x_fg, [], must_be_device_array=False)

    x = Shape_i(1)(aet.as_tensor_variable(x_np))
    x_fg = FunctionGraph([], [x])

    compare_jax_and_py(x_fg, [], must_be_device_array=False)


@pytest.mark.xfail(
    version_parse(jax.__version__) >= version_parse("0.2.12"),
    reason="Omnistaging cannot be disabled",
)
def test_jax_specify_shape():
    x_np = np.zeros((20, 3))
    x = SpecifyShape()(aet.as_tensor_variable(x_np), (20, 3))
    x_fg = FunctionGraph([], [x])

    compare_jax_and_py(x_fg, [])

    with config.change_flags(compute_test_value="off"):

        x = SpecifyShape()(aet.as_tensor_variable(x_np), (2, 3))
        x_fg = FunctionGraph([], [x])

        with pytest.raises(AssertionError):
            compare_jax_and_py(x_fg, [])
Example #18
    def __post_init__(self) -> None:
        # postgres uses a two-part version scheme (major.minor)
        assert version_parse(self.postgres_version) >= version_parse(
            "13.4"
        ), "Version must be >= 13.4"
Example #19
)

missing = []
for required, modname, impname, minver, desc in modules:
    try:
        x = importlib.import_module(impname)
        import_ok, version_ok = True, False
        ver = getattr(x, '__version__', None)
        if ver is None:
            ver = getattr(x, 'version', None)
        if ver is not None and ' ' in ver:
            ver = ver.split(' ', 1)[0]
        if callable(ver): ver = ver()
        version_ok = True
        if HAS_PACKAGING and minver is not None and ver is not None:
            version_ok = version_parse(minver) <= version_parse(ver)
    except ImportError:
        import_ok, version_ok = False, True
    if not (import_ok and version_ok):
        if minver is None: minver = ''
        pref = '***' if required else ''
        missing.append(' {:3.3s} {:18.18s} {:8.8s}  {:s}'.format(
            pref, modname, minver, desc))

## For Travis-CI, need to write a local site config file
##
if os.environ.get('TRAVIS_CI_TEST', '0') == '1':
    time.sleep(0.2)

pjoin = os.path.join
pexists = os.path.exists
Example #20
        limit_val_batches=0.1,
    )

    model = EvalModelTemplate()
    tpipes.run_model_test(trainer_options, model, on_gpu=False)

    # test freeze on cpu
    model.freeze()
    model.unfreeze()


@pytest.mark.skipif(platform.system() == "Windows",
                    reason="Distributed training is not supported on Windows")
@pytest.mark.skipif(
    (platform.system() == "Darwin"
     and version_parse(torch.__version__) < version_parse("1.3.0")),
    reason="Distributed training is not supported on MacOS before Torch 1.3.0")
def test_multi_cpu_model_ddp(tmpdir):
    """Make sure DDP works."""
    tutils.set_random_master_port()

    trainer_options = dict(
        default_root_dir=tmpdir,
        progress_bar_refresh_rate=0,
        max_epochs=1,
        limit_train_batches=0.4,
        limit_val_batches=0.2,
        gpus=None,
        num_processes=2,
        distributed_backend='ddp_cpu',
    )
Example #21
def list_detail_download(cache_dir, releases):
    details = {}

    binary_regex = re.compile(BINARY_REGEX)
    binary_gcc_regex = re.compile(BINARY_INTEREST_GCC)
    ttl_never = timedelta(days=300)  # Should never change.
    ttl_retry = timedelta(hours=4)  # While waiting for snapshot.
    for release in releases:
        details_release = {}

        url = snapshot_url(release, 'disk')
        disk_path = request_cached_path(url, cache_dir)
        if path.exists(disk_path) and not os.stat(disk_path)[stat.ST_SIZE]:
            logger.debug('using retry ttl for %s disk file', release)
            disk_ttl = ttl_retry
        else:
            disk_ttl = ttl_never
        disk = request_cached(url, cache_dir, disk_ttl).strip().splitlines()

        if len(disk) != 2:
            # Skip for now and retry later.
            logger.debug('skipping %s due to invalid disk file', release)

            if len(disk) != 0:
                # Clear the cache file to mark it invalid.
                with open(disk_path, 'w') as disk_file:
                    disk_file.write('')

            continue

        details_release['disk_base'] = sizeof_fmt(int(disk[0].split('\t')[0]))
        details_release['binary_unique_count'] = int(disk[1].split(' ')[0])
        details_release['disk_shared'] = 'unknown'

        url = snapshot_url(release, 'rpm.list')
        binaries = request_cached(url, cache_dir,
                                  ttl_never).strip().splitlines()
        details_release['binary_count'] = len(binaries)

        binary_interest = {}
        for binary in binaries:
            binary_match = binary_regex.match(path.basename(binary))
            if not binary_match:
                continue

            binary_name = binary_match.group('name')
            # Include all packages of interest and any gcc\d+ package to be filtered later.
            if not (binary_name in BINARY_INTEREST
                    or binary_gcc_regex.match(binary_name)):
                continue

            # When multiple versions of the same binary are present, ensure the latest version wins.
            if (binary_name not in binary_interest
                    or version_parse(binary_interest[binary_name]) <
                    version_parse(binary_match.group('version'))):
                binary_interest[binary_name] = binary_match.group('version')

        # Assuming the default gcc version was found, keep only the major gcc packages near that version.
        if 'gcc' in binary_interest:
            gcc_major_version = int(binary_interest['gcc'])
            gcc_major_versions = [
                gcc_major_version - 1, gcc_major_version, gcc_major_version + 1
            ]
            binary_interest_filtered = {}
            for binary_name, binary_version in binary_interest.items():
                match = binary_gcc_regex.match(binary_name)
                if match:
                    if int(match.group(
                            'major_version')) not in gcc_major_versions:
                        continue

                binary_interest_filtered[binary_name] = binary_version

            binary_interest = binary_interest_filtered

        details_release['binary_interest'] = binary_interest

        url = snapshot_url(release, 'rpm.unique.list')
        binaries = request_cached(url, cache_dir,
                                  ttl_never).strip().splitlines()

        binary_interest_changed = set()
        for binary in binaries:
            binary_match = binary_regex.match(path.basename(binary))
            if binary_match and binary_match.group('name') in binary_interest:
                binary_interest_changed.add(binary_match.group('name'))

        details_release['binary_interest_changed'] = list(
            sorted(binary_interest_changed))

        details[release] = details_release

    return details
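One caveat when feeding RPM-style version strings to version_parse: packaging 22.0 removed LegacyVersion, so input that is not valid PEP 440 raises InvalidVersion instead of falling back to a permissive parse. A guarded helper (safe_parse is a hypothetical name, not part of the snippet above) might look like:

from typing import Optional

from packaging.version import InvalidVersion, Version, parse as version_parse

def safe_parse(text: str) -> Optional[Version]:
    # return None for non-PEP-440 strings instead of raising InvalidVersion
    try:
        return version_parse(text)
    except InvalidVersion:
        return None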
Example #22
    parser.add_argument('--os', help='OS to download CMake for (default: {default_os})'.format(default_os=default_os),
                        choices=['macos', 'linux', 'windows'], default=default_os)
    parser.add_argument('--latest_release', action='store_true',
                        help='only download the latest release (default: False)')
    parser.add_argument('--latest_patch', action='store_true',
                        help='only download the latest patch version for each release (default: False)')
    parser.add_argument('--first_minor', action='store_true',
                        help='only download the first minor version for each release (default: False)')
    parser.add_argument('--release_candidates', action='store_true',
                        help='also consider release candidates (default: False)')
    parser.add_argument('--tools_directory', metavar='DIR', default='tools',
                        help='path to the CMake binaries (default: "tools")')
    args = parser.parse_args()

    version_dict = create_version_dict(os=args.os)
    versions = sorted([version_parse(version) for version in version_dict.keys()])

    if not args.release_candidates:
        versions = [version for version in versions if not version.is_prerelease]

    if args.latest_patch:
        result = []
        for major, minor in set([(version.major, version.minor) for version in versions]):
            result.append([version for version in versions if version.major == major and version.minor == minor][-1])
        versions = sorted(result)

    if args.first_minor:
        result = []
        for major, minor in set([(version.major, version.minor) for version in versions]):
            result.append([version for version in versions if version.major == major and version.minor == minor][0])
        versions = sorted(result)
Example #23
def install_server(
    base_url: furl.furl,
    file: typing.Optional[pathlib.Path],
    mpac_id: typing.Optional[str],
    mpac_key: typing.Optional[str],
    interactive: typing.Optional[bool],
    reinstall: typing.Optional[bool],
):
    try:
        if file is not None:
            plugin_path = file
        elif mpac_id is not None:
            id, version = download.split_name_and_version(mpac_id)
            logging.info("Downloading app %s (%s)...", id, version)
            plugin_path = download.download_app_by_marketplace_id(id, version)
            logging.info("Successfully downloaded app to %s", plugin_path)
        elif mpac_key is not None:
            key, version = download.split_name_and_version(mpac_key)
            logging.info("Downloading app %s (%s)...", key, version)
            plugin_path = download.download_app_by_app_key(key, version)
            logging.info("Successfully downloaded app to %s", plugin_path)
        else:
            try:
                plugin_path = pathutil.get_jar_path_from_pom()
            except FileNotFoundError:
                logging.error("Could not find the plugin you want to install. Are you in a maven directory?")
                sys.exit(1)
    except (MpacAppNotFoundError, MpacAppVersionNotFoundError) as e:
        logging.error("Could not find the plugin or plugin version %s", e)
        sys.exit(1)
    except Exception as e:
        logging.error("An error occured while downloading an app from the marketplace %s", e)
        sys.exit(1)

    if interactive:
        confirm = input("Do you really want to upload and install the plugin? (y/N) ")
        if confirm.lower() != "y":
            sys.exit()

    upm = UpmApi(base_url)
    if plugin_path.suffix == ".obr":
        plugin_info = jar.get_plugin_info_from_obr_path(plugin_path)
    else:
        plugin_info = jar.get_plugin_info_from_jar_path(plugin_path)
    if reinstall:
        try:
            try:
                status = upm.uninstall_plugin(plugin_info.key)
            except requests.exceptions.ConnectionError:
                logging.error("Could not connect to host - check your base-url")
                sys.exit(1)
            except Exception as exc:
                logging.error("An error occured - check your credentials")
                logging.error("%s", exc)
                sys.exit(1)
            if status:
                logging.info("Plugin successfully uninstalled")
            else:
                logging.error("An error occurred. The plugin could not be uninstalled.")
        except (FileNotFoundError, zipfile.BadZipFile, KeyError, pathutil.PluginKeyNotFoundError):
            logging.error("Could not get the plugin key of the supplied jar - are you sure you want to upload a plugin, mate?")
    else:
        # WORKAROUND: replace -SNAPSHOT with .dev, to follow python versioning scheme
        # TODO: find a new library that can parse -SNAPSHOT correctly
        version_to_install = version_parse(plugin_info.version.replace("-SNAPSHOT", ".dev"))
        try:
            # WORKAROUND: replace -SNAPSHOT with .dev, to follow python versioning scheme
            # TODO: find a new library that can parse -SNAPSHOT correctly
            version_installed = version_parse(upm.get_plugin(plugin_info.key).version.replace("-SNAPSHOT", ".dev"))
            if version_installed > version_to_install:
                logging.warning(
                    f"Looks like you are trying to install a .jar with a lower version ({version_to_install}) than already "
                    f"installed ({version_installed}).\n"
                    "This will most likely fail. Use the --reinstall option to uninstall the plugin first."
                )
        except json.decoder.JSONDecodeError:
            # If we can't get the current plugin, this means that the plugin is installed for the first time.
            # In this case, we can just ignore the error and proceed.
            pass

    displayed_base_url = base_url.copy().remove(username=True, password=True)
    logging.info(f"{pathlib.Path(plugin_path).name} will be uploaded to {displayed_base_url}")

    try:
        token = upm.get_token()
    except requests.exceptions.RequestException:
        logging.error("Could not connect to host - check your base-url")
        sys.exit(1)
    except KeyError:
        logging.error("UPM Token couldn't be retrieved; are your credentials correct?")
        sys.exit(1)

    try:
        with open(plugin_path, "rb") as plugin_file:
            files = {"plugin": plugin_file}
            with Progress(
                "[progress.description]{task.description}",
                "[[blue]{task.percentage:>3.0f}%[reset]]",
                BarColumn(bar_width=None, complete_style="blue", finished_style="blue"),
            ) as pbar:
                task = pbar.add_task("[blue]Installing...", total=100)
                progress, previous_request = upm.upload_plugin(files, token)
                while progress != 100:
                    progress, previous_request = upm.get_current_progress(previous_request)
                    pbar.update(task, completed=progress)
                    time.sleep(0.1)
    except requests.exceptions.RequestException:
        logging.error("An error occured while uploading plugin")
        sys.exit(1)
    except FileNotFoundError:
        logging.error("Could not find the plugin you want to install.")
        sys.exit(1)
    finally:
        for file in files.values():
            file.close()

    plugin_data = PluginDto.decode(previous_request)

    if plugin_data.enabled:
        status = "[green]enabled[reset]!"
    else:
        status = (
            "[red]disabled[reset]! \n"
            "You should check the logs of your Atlassian host to find out why your plugin was disabled."
        )
    all_nr, enabled, disabled = upm.module_status(previous_request)
    logging.info(
        f"plugin {plugin_data.name} ({plugin_data.key}, v{plugin_data.version}) uploaded"
        f" and {status} ({enabled} of {all_nr} modules enabled)"
    )
    if len(disabled) != 0 and len(disabled) != all_nr:
        for module in disabled:
            logging.info(f"   - {module.key} is disabled")
    elif len(disabled) == all_nr:
        logging.error(
            "Your plugin was installed successfully but all modules are disabled. This is often caused by problems such as"
            " importing services that are not properly defined in your atlassian-plugin.xml."
        )
        logging.error("Check the logs of your Atlassian host to find out more.")
Example #24
def version_check(version):
    if version_parse(version.api) >= version_parse(version.cli):
        logging.debug('version_check: PASSED')
    else:
        raise VersionCheckFailedError(version.cli, version.api)
Example #25
    def handle(self, *args, **options):
        """
        Ask before running ...
        :param args:
        :type args:
        :param options:
        :type options:
        :return:
        :rtype:
        """

        if imicusfat_installed():
            has_conflict = False

            self.stdout.write(
                self.style.SUCCESS("ImicusFAT module is active, let's go!")
            )

            self.stdout.write("Checking for potentially available updates ...")

            ifat_version_available = self.latest_version_available(
                package_name="allianceauth-imicusfat"
            )

            afat_version_available = self.latest_version_available(
                package_name="allianceauth-afat"
            )

            # Check if updates for ImicusFAT are available
            if ifat_version_available is not None:
                if version_parse(ifat_version_installed) < version_parse(
                    ifat_version_available
                ):
                    self.stdout.write(
                        self.style.WARNING(
                            "ImicusFAT is outdated. "
                            "Please update to the latest ImicusFAT version first."
                        )
                    )

                    self.stdout.write(
                        self.style.WARNING(
                            f"ImicusFAT version installed: {ifat_version_installed}"
                        )
                    )

                    self.stdout.write(
                        self.style.WARNING(
                            f"ImicusFAT version available: {ifat_version_available}"
                        )
                    )

                    has_conflict = True
            else:
                has_conflict = True

            # Check if updates for aFAT are available
            if afat_version_available is not None:
                if version_parse(afat_version_installed) < version_parse(
                    afat_version_available
                ):
                    self.stdout.write(
                        self.style.WARNING(
                            "aFAT is outdated. "
                            "Please update to the latest aFAT version first."
                        )
                    )

                    self.stdout.write(
                        self.style.WARNING(
                            f"aFAT version installed: {afat_version_installed}"
                        )
                    )

                    self.stdout.write(
                        self.style.WARNING(
                            f"aFAT version available: {afat_version_available}"
                        )
                    )

                    has_conflict = True
            else:
                has_conflict = True

            if has_conflict is False:
                self.stdout.write(
                    "Importing all FAT/FAT link data from ImicusFAT module. "
                    "This can only be done once during the very first installation. "
                    "As soon as you have data collected with your AFAT module, "
                    "this import will fail!"
                )

                user_input = get_input("Are you sure you want to proceed? (yes/no)?")

                if user_input == "yes":
                    self.stdout.write("Starting import. Please stand by.")
                    self._import_from_imicusfat()
                else:
                    self.stdout.write(self.style.WARNING("Aborted."))
        else:
            self.stdout.write(
                self.style.WARNING(
                    "ImicusFAT module is not active. "
                    "Please make sure you have it in your "
                    "INSTALLED_APPS in your local.py! "
                    "Aborting."
                )
            )
Example #26
	def get_version(self, filebase: str) -> Version:
		match = self._re_version.search(filebase.lower())
		assert match, "Couldn't find version in %r" % filebase
		return version_parse(match.group(1))
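A closing note on the -> Version annotation used here: since packaging 22.0, parse always returns a Version (or raises InvalidVersion for invalid input), so the annotation is exact; earlier releases could return a LegacyVersion instead:

from packaging.version import Version, parse as version_parse

assert isinstance(version_parse('2.0.1'), Version)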