Example #1
    def __init__(self, name='default', path=None):
        """Initialize the handler

        name: name of the file, without the extension
        """

        # Saving the name
        self._name = name

        # Store the given path, if set
        if path is not None:
            if not os.path.exists(path):
                raise NotInstalledError
            self._path = path

        # Read the version file
        version = self.read()

        # Initialize the version management
        semver = str(Version.coerce(version))
        Version.__init__(self, semver)

        # Expose build and prerelease as lists so they can be edited in place
        self.build = list(self.build)
        self.prerelease = list(self.prerelease)
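A side note on the pattern above: Version.coerce normalizes loose strings into strict SemVer, and build/prerelease are tuples by default, hence the list conversion. A minimal sketch:

    from semantic_version import Version

    v = Version(str(Version.coerce('1.2')))  # '1.2' coerces to '1.2.0'
    v.build = list(v.build)                  # tuples become editable lists
    assert str(v) == '1.2.0'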
Example #2
def get_change_log_for_app(app, from_version, to_version):
	change_log_folder = os.path.join(frappe.get_app_path(app), "change_log")
	if not os.path.exists(change_log_folder):
		return

	from_version = Version(from_version)
	to_version = Version(to_version)
	# remove pre-release part
	to_version.prerelease = None

	major_version_folders = ["v{0}".format(i) for i in range(from_version.major, to_version.major + 1)]
	app_change_log = []

	for folder in os.listdir(change_log_folder):
		if folder in major_version_folders:
			for file in os.listdir(os.path.join(change_log_folder, folder)):
				version = Version(os.path.splitext(file)[0][1:].replace("_", "."))

				if from_version < version <= to_version:
					file_path = os.path.join(change_log_folder, folder, file)
					content = frappe.read_file(file_path)
					app_change_log.append([version, content])

	app_change_log = sorted(app_change_log, key=lambda d: d[0], reverse=True)

	# convert version to string and send
	return [[cstr(d[0]), d[1]] for d in app_change_log]
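The from_version < version <= to_version filter works because Version implements total ordering, so chained comparisons behave as expected:

    from semantic_version import Version

    assert Version('1.0.1') < Version('1.2.0') <= Version('2.0.0')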
Example #3
def parse(top, root):
    path = os.path.relpath(root, top)

    parts = path.split(os.path.sep)

    if parts[0] in blacklist:
        print('WARNING: Ignoring \'{:s}\' as it is blacklisted.'.format(path), file=sys.stderr)
        return None

    props = parse_properties(os.path.join(root, 'source.properties'))
    name = {
        'add-ons': add_ons,
        'build-tools': build_tools,
        'docs': docs,
        'extras': extras,
        'platforms': platforms,
        'platform-tools': platform_tools,
        'samples': samples,
        'sources': sources,
        'system-images': system_images,
        'tools': tools
    }.get(parts[0], default)(props, parts)
    if not name:
        print("WARNING: Failed to parse package:", path, file=sys.stderr)
        return None
    return Package(parts[0], name, props['revision'], Version.coerce(props['revision']))
Example #4
def list_packages(android):
    packages = []
    separator = '----------'
    out = subprocess.check_output([android, 'list', 'sdk', '--all', '--extended'],
                                  universal_newlines=True)  # decode bytes to str
    fields = out.split(separator)[1:]
    p_id = re.compile(r'^id: (\d+) or "(.+)"$', flags=re.MULTILINE)
    p_revision = re.compile(r'[Rr]evision (.+)')
    p_type = re.compile(r'Type: (\w+)')
    for field in fields:
        m = p_id.search(field)
        if m is None:
            print("Failed to parse package ID:", field, file=sys.stderr)
            continue
        num, name = m.groups()
        m = p_revision.search(field)
        if m is None:
            print("Failed to parse revision:", field, file=sys.stderr)
            continue
        revision, = m.groups()
        revision = revision.replace(' (Obsolete)', '')
        semver = Version.coerce(revision)

        m = p_type.search(field)
        if m is None:
            print("Failed to parse type:", field, file=sys.stderr)
            continue
        ptype, = m.groups()
        category = categories[ptype]
        if category is None:
            print("Unrecognized type:", ptype, file=sys.stderr)
            category = ptype.lower()
        packages.append(Package(category, name, revision, semver, num))
    return packages
Example #5
 def __eq__(self, other):
     # Seems like a bit of a hack...
     if isinstance(other, SpecItem):
         return other == self
     if isinstance(other, (str, tuple, list)):
         other = AsdfVersion(other)
     return Version.__eq__(self, other)
Example #6
    def get_package(self, spec, parent_channels=()):

        res = req.get('https://pypi.python.org/pypi/%s/json' % spec.package)
        info = res.json()
        versions = {Version.coerce(v): r for v, r in info['releases'].items()}
        version = max(spec.version_spec.filter(versions.keys()))
        releases = versions[version]

        data = info['info']
        data['files'] = []

        for file_info in releases:
            file_info['basename'] = file_info['filename']
            file_info['attrs'] = {'packagetype': file_info['packagetype']}
            file_info['distribution_type'] = 'pypi'
            file_info['version'] = str(version)
            file_info['md5'] = file_info['md5_digest']

            data['files'].append(file_info)

        pkg = Package(self.env, data)

        self.fetch(pkg.file)

        with open(pkg.file.cache_path) as fileobj:
            _, _, data = pypi_inspect.inspect_pypi_package(pkg.file.cache_path, fileobj)
            file_info['dependencies'] = data.get('dependencies', [])

        return pkg
Example #7
File: util.py Project: scztt/qpm
def sort_versions(versions):
    sem_version_map = dict()
    for v in versions:
        sem_version_map[Version.coerce(v)] = v

    sorted_sem_versions = sorted(sem_version_map.keys())
    return map(lambda v: sem_version_map[v], sorted_sem_versions)
Example #8
File: util.py Project: scztt/qpm
def select_versions(spec, versions):
    spec = to_spec(spec)

    sem_version_map = dict()
    for v in versions:
        sem_version_map[Version.coerce(v)] = v

    selected_version = spec.select(sem_version_map.keys())
    return sem_version_map[selected_version]
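to_spec presumably wraps semantic_version.Spec; a hedged sketch of the select step used above:

    from semantic_version import Spec, Version

    spec = Spec('>=1.0.0,<2.0.0')
    candidates = [Version('1.0.0'), Version('1.4.2'), Version('2.0.0')]
    assert str(spec.select(candidates)) == '1.4.2'  # highest match wins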
Example #9
def check_latest_version_property_value(context, property_name, expected_value):
    """Check if the latest_version property contains expected value."""
    value = read_property_value_from_gremlin_response(context, property_name)
    try:
        assert Version.coerce(value) >= Version(expected_value)
    except Exception:
        data, meta = get_results_from_gremlin(context)
        print("Metadata returned by Gremlin:")
        pprint.pprint(meta)
        print("Data returned by Gremlin:")
        pprint.pprint(data)
        raise
Example #10
 def __init__(self, *args, **kwargs):
     requirement = kwargs.pop('requirement', None)
     super(PackageVersion, self).__init__(*args, **kwargs)
     if requirement is None:
         return
     self.raw = requirement.line
     self.package_name = requirement.name
     self.is_editable = requirement.editable
     if requirement.editable:
         self.url = requirement.uri
     else:
         # HACK: we only take the first version.
         self.current_version = Version.coerce(requirement.specs[0][1])
         self.url = package_url(requirement.name)
Example #11
def is_compatible_with_framework(version):
    """
    Returns ``True`` if the supplied version is compatible with the current framework version,
    otherwise the function returns ``False``. Evaluation of versions is performed
    using the `semantic_version`_-package:

    .. sourcecode:: Python

        is_compatible_with_framework('2.0.0')

    All whitespace is stripped from the string prior to evaluation.

    :param version: A version to validate against the framework version.
    :return: True if framework version is compatible with specification, False otherwise.

    .. _semantic_version: https://pypi.python.org/pypi/semantic_version/
    """
    if version is None:
        return None

    lewis_version = Version.coerce(__version__)

    return lewis_version == Version.coerce(version.strip())
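Since both sides are coerced and compared with ==, only exact normalized matches count. For illustration (assuming the framework's __version__ is '1.0.1'):

    is_compatible_with_framework('1.0.1')    # True
    is_compatible_with_framework(' 1.0.1 ')  # True, whitespace is stripped
    is_compatible_with_framework('1.0')      # False, coerces to 1.0.0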
Example #12
def gen_version(version_str):
    """Generates an :class:`Version` object

    takes a SemVer string and returns a :class:`Version`
    if not a proper SemVer string it coerces it

    Args:
        version_str (str): version string to use
    """
    try:
        ver = Version(version_str)
    except ValueError:
        ver = Version.coerce(version_str)
    return ver
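A quick usage sketch of gen_version:

    assert str(gen_version('1.2.3')) == '1.2.3'  # valid SemVer, parsed as-is
    assert str(gen_version('1.2')) == '1.2.0'    # Version('1.2') raises ValueError, so it is coerced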
Example #13
    def __init__(self, name, object_number):

        assert type(name) == self._name_class, "Wrong type: {}. Expected {}"\
            .format(type(name), self._name_class)

        self._on = object_number
        self._name = name

        if not self._name.type_is_compatible(self._on):
            raise TypeError("The name and the object number must be "+
                            "of compatible types: got {} and {}"
                            .format(type(name), type(object_number)))

        # Update the patch number to always be the revision
        nv = Version(self._name.version)

        nv.patch = int(self._on.revision)

        self._name.version = str(nv)

        self.locations = Locations(self)
        self.data = {}

        self.is_valid()
Example #14
def flowStart(action, increment):
	version = Version(VersionString)
	if increment == 'patch':
		version.patch += 1
	elif increment == 'minor':
		version.minor += 1
		version.patch = 0
	elif increment == 'major':
		version.major += 1
		version.minor = 0
		version.patch = 0
	else:
		raise Exception('unexpected increment target')

	newVersionString = "%d.%d.%d" % (version.major, version.minor, version.patch)
	print "running - git flow %s start %d.%d.%d" % (action, version.major, version.minor, version.patch)
	if subprocess.call(['git', 'flow', action, 'start', "%d.%d.%d" % (version.major, version.minor, version.patch)]) != 0:
		sys.exit()

	newContents = TargetContents[:VersionMatch.start(1)] + newVersionString + TargetContents[VersionMatch.end(1):]
	with open(TargetPath, 'w+') as fTarget:
		fTarget.write(newContents)

	subprocess.call(['git', 'add', TargetPath])
Example #15
def get_current_version(repo):
    latest = None
    for tag in repo.tags:
        v = tag.name
        if v.startswith('v.'):
            v = v[2:]
        elif v.startswith('v'):
            v = v[1:]

        v = Version.coerce(v)

        if not latest:
            latest = v
        else:
            if v > latest:
                latest = v
    return latest
Example #16
def update_version_number(update_level='patch'):
    """Update version number

    Returns a semantic_version object"""

    """Find current version"""
    temp_file = version_file().parent / ("~" + version_file().name)
    with open(str(temp_file), 'w') as g:
        with open(str(version_file()), 'r') as f:
            for line in f:
                version_matches = bare_version_re.match(line)
                if version_matches:
                    bare_version_str = version_matches.groups(0)[0]
                    if semantic_version.validate(bare_version_str):
                        current_version = Version(bare_version_str)
                        print("{}Current version is {}".format(" "*4, current_version))
                    else:
                        current_version = Version.coerce(bare_version_str)
                        if not text.query_yes_quit("{}I think the version is {}. Use it?".format(" "*4, current_version), default="yes"):
                            exit(colorama.Fore.RED + 'Please set an initial version number to continue')

                    """Determine new version number"""
                    if update_level == 'major':
                        current_version = current_version.next_major()
                    elif update_level == 'minor':
                        current_version = current_version.next_minor()
                    elif update_level == 'patch':
                        current_version = current_version.next_patch()
                    elif update_level == 'prerelease':
                        if not current_version.prerelease:
                            current_version = current_version.next_patch()
                            current_version.prerelease = ('dev', )
                    elif update_level is None:
                        # don't update version
                        pass
                    else:
                        exit(colorama.Fore.RED + 'Cannot update version in {} mode'.format(update_level))

                    print("{}New version is     {}".format(" "*4, current_version))

                    """Update version number"""
                    line = '__version__ = "{}"\n'.format(current_version)
                print(line, file=g, end="")
        #print('', file=g)  # add a blank line at the end of the file
    shutil.copyfile(str(temp_file), str(version_file()))
    os.remove(str(temp_file))
    return current_version
Example #17
    def get_versions(self):

        # str.rstrip('.git') strips trailing characters, not the suffix
        repo_url = self.url[:-4] if self.url.endswith('.git') else self.url
        repo_owner, repo_name = repo_url.rsplit('/', 2)[-2:]
        repo_ident = '%s/%s' % (repo_owner, repo_name)
        url = '%s/repos/%s/tags' % (self.BASE_URL, repo_ident)

        LOGGER.debug('Getting version list from %s ...', url)

        versions = OrderedDict()
        for version_data in get_json(url):
            version_name = version_data['name']
            version_num = Version.coerce(version_name.lstrip('v'), partial=True)
            versions[version_num] = {
                'name': version_name,
                'url_pack': version_data['tarball_url'],
                'url_root': '%s/%s/%s' % (self.RAW_URL, repo_ident, version_name),
            }

        return versions
Example #18
def parse(top, root):
    path = root[len(top):]
    parts = path.split(os.path.sep)
    props = parse_properties(os.path.join(root, 'source.properties'))
    name = {
        'add-ons': add_ons,
        'build-tools': build_tools,
        'docs': docs,
        'extras': extras,
        'platforms': platforms,
        'platform-tools': platform_tools,
        'samples': samples,
        'sources': sources,
        'system-images': system_images,
        'tools': tools
    }.get(parts[0], default)(props, parts)
    if not name:
        print("Package parse failed:", path, file=sys.stderr)
        return None
    return Package(parts[0], name, props['revision'], Version.coerce(props['revision']))
Example #19
 def __init__(self, *args, **kwargs):
     requirement = kwargs.pop('requirement', None)
     super(PackageVersion, self).__init__(*args, **kwargs)
     if requirement is None:
         return
     self.raw = requirement.line
     self.package_name = requirement.name
     self.is_editable = requirement.editable
     if requirement.editable:
         self.url = ''
         self.current_version = None
     else:
         # HACK: we only take the first version.
         try:
             self.current_version = Version.coerce(requirement.specs[0][1])
             self.is_parseable = True
         except ValueError as ex:
             self.current_version = None
             self.is_parseable = False
             logger.debug("Unparseable package version (%s): %s", requirement.specs[0][1], ex)
         self.url = pypi.package_url(requirement.name)
Example #20
import os
import re
from importlib.metadata import version

from semantic_version import Version

from . import api, database, spotify, utils

__version__ = str(Version.coerce(version(__package__)))
match = re.match(r'.+\-(\D+)(\d+)', __version__)
if match is not None:
    pre_release_str = match.group(1)
    pre_release_num = match.group(2)
    __version__ = __version__.replace(f'{pre_release_str}{pre_release_num}',
                                      f'{pre_release_str}.{pre_release_num}')

if os.getenv('APP_LAMBDA'):
    from mangum import Mangum

    from .api.main import app

    handler = Mangum(app)
Example #21
    def install_from_repo(repo,
                          name,
                          version_spec='*',
                          allow_upgrade=False,
                          _visited=None):
        """
        This method downloads a package satisfying spec.

        .. note ::
            The function waits until all of dependencies are installed.
            Run it as separate thread if possible.
        """

        if _visited is None:
            _visited = {}
            top_level = True
        else:
            top_level = False

        if name in _visited:
            log.warn("Cyclic dependency found when installing %r <-> %r", name,
                     _visited)
            return

        prev = LocalPackage.by_name(name)

        _version_spec = Spec(version_spec)
        satisfies_local = prev and Version(prev.version) in _version_spec

        if allow_upgrade or not satisfies_local:
            log.debug("Fetching releases for %r from %r...", name, repo)

            releases = _download(repo.url + '/releases?name=' +
                                 urllib2.quote(name)).read()
            releases = json.loads(releases)
            if not releases['success']:
                error = "Release not found on remote repository: %r on %r (error: %r)" % (
                    name, repo, releases['error'])
                raise Exception(error)

            releases = [
                release for release in releases['data']
                if Version(release['version']) in _version_spec
            ]

            if not releases:
                error = "No release of %r satisfies version spec %r" % (
                    name, version_spec)
                raise Exception(error)

            # select latest release
            release = releases[-1]
            downloading = None if (prev and release['version']
                                   == prev.version) else release['version']
        else:
            downloading = None

        if downloading:
            log.info('Collecting %s...', name)
            data = _download(repo.url + '/download?spec=' +
                             urllib2.quote(name) + '==' +
                             urllib2.quote(downloading)).read()
            io = StringIO(data)
            f = zipfile.ZipFile(io, 'r')

            info = json.load(f.open('info.json'))
            install_path = os.path.join(g['path']['packages'], info["_id"])

            # this ensures os.path.exists(install_path) == False
            # TODO: should we unload a already-loaded plugin?
            if prev:
                prev.remove()
                assert not os.path.exists(install_path)

            # XXX: edge case?
            removed = os.path.join(install_path, '.removed')
            if os.path.isfile(removed):
                os.unlink(removed)

            log.info('Extracting into %r...', install_path)
            f.extractall(install_path)

            # Initiate LocalPackage object
            pkg = LocalPackage(info['_id'], install_path, info['version'])
        else:
            pkg = prev

            log.info("Requirement already satisfied: %s%s", name,
                     '' if version_spec == '*' else version_spec)

        restart_required = pkg.metadata().get('restart_required', False)
        _visited[name] = (pkg.version, restart_required)

        # First, install dependencies
        # TODO: add version check
        for dep_name, dep_version_spec in pkg.metadata().get(
                'dependencies', {}).items():
            InstallablePackage.install_from_repo(repo, dep_name,
                                                 dep_version_spec,
                                                 allow_upgrade, _visited)

        if downloading:
            pkg.install()

        if not restart_required:
            pkg.load()

        if top_level:
            log.info(
                "Successfully installed %s",
                ' '.join('%s-%s' % (key, value[0])
                         for key, value in _visited.items()))

            delayed = [(key, value) for key, value in _visited.items()
                       if value[1]]
            if delayed:
                log.info(
                    "Plugins in the following packages will be loaded after restarting IDA."
                )
                log.info(
                    "  %s", " ".join('%s-%s' % (key, value[0])
                                     for key, value in delayed))

        return pkg
Example #22
class EslintTool(JsTool, JsonTool):
    ESLINT_TOOL_ID = "eslint"  # to-do: versioning?
    CONFIG_FILE_NAME = ".eslintrc.yml"
    PROJECT_NAME = "node-js"

    JS_NAME_PATTERN = re.compile(r".*\.(?:js|jsx|ts|tsx)\b")

    MANIFEST_PATH: Path = Path(__file__).parent.resolve() / "eslint"

    # Packages we always need no matter what.
    ALWAYS_NEEDED = {
        "eslint": Version("6.1.0"),
        "eslint-config-airbnb": Version("18.0.1"),
        "eslint-plugin-import": Version("2.18.2"),
        "eslint-plugin-jsx-a11y": Version("6.2.3"),
        "eslint-plugin-react": Version("7.14.3"),
        "eslint-plugin-react-hooks": Version("1.7.0"),
    }
    TYPESCRIPT_PACKAGES = {
        "@typescript-eslint/parser": Version("2.3.3"),
        "@typescript-eslint/eslint-plugin": Version("2.3.3"),
        "typescript": Version("3.6.4"),
    }

    # Never fire on 'window', etc.
    ALWAYS_INCLUDE_GLOBALS = ["browser", "commonjs", "es6", "node"]

    # Remaining environments are determined by inspecting package.json
    POSSIBLE_GLOBALS = [
        "applescript",
        "atomtest",
        "embertest",
        "greasemonkey",
        "jasmine",
        "jest",
        "jquery",
        "mango",
        "meteor",
        "mocha",
        "nashorn",
        "phantomjs",
        "prototypejs",
        "protractor",
        "qunit",
        "serviceworker",
        "shelljs",
        "webextensions",
    ]

    @property
    def parser_type(self) -> Type[Parser]:
        return EslintParser

    @classmethod
    def tool_id(cls) -> str:
        return EslintTool.ESLINT_TOOL_ID

    @classmethod
    def tool_desc(cls) -> str:
        return "Identifies and reports on patterns in JavaScript and TypeScript"

    @property
    def install_location(self) -> Path:
        return self.base_path / constants.RESOURCE_PATH / "eslint"

    @property
    def project_name(self) -> str:
        project_deps = self._dependencies(location=self.base_path)
        all_used = [g for g in self.POSSIBLE_GLOBALS if g in project_deps]
        if self.__uses_typescript(project_deps):
            all_used.append("TypeScript")
        if self.__uses_react(project_deps):
            all_used.append("react")

        if all_used:
            fws = ", ".join(sorted(all_used))
            return f"{EslintTool.PROJECT_NAME} (with {fws})"
        return EslintTool.PROJECT_NAME

    @property
    def file_name_filter(self) -> Pattern:
        return self.JS_NAME_PATTERN

    @property
    def eslintrc_path(self) -> Path:
        return self.install_location / EslintTool.CONFIG_FILE_NAME

    def matches_project(self) -> bool:
        return (self.context.base_path / "package.json").exists()

    def __uses_typescript(self, deps: NpmDeps) -> bool:
        # ts dependency shouldn't be in main deps, but, if it is, ok
        return "typescript" in deps

    def __uses_react(self, deps: NpmDeps) -> bool:
        return "react" in deps.main  # react dependency must be in main deps

    def __copy_eslintrc(self, identifier: str) -> None:
        logging.info(f"Using {identifier} .eslintrc configuration")
        shutil.copy(self.MANIFEST_PATH / f".eslintrc-{identifier}.yml",
                    self.eslintrc_path)

    def _setup_env(self) -> None:
        self.install_location.mkdir(exist_ok=True, parents=True)
        eslint_package_path = self.install_location / "package.json"
        if not eslint_package_path.exists():
            logging.info("Creating eslint environment")
            shutil.copy(self.MANIFEST_PATH / "package.json",
                        eslint_package_path)

    def __add_globals(self, deps: NpmDeps) -> None:
        """Adds global environments to eslintrc"""
        node_globals = [g for g in self.POSSIBLE_GLOBALS if g in deps
                        ] + self.ALWAYS_INCLUDE_GLOBALS
        env_dict = dict((g, True) for g in node_globals)

        with self.eslintrc_path.open() as stream:
            rc = yaml.safe_load(stream)
        rc[RC_ENVIRONMENTS] = env_dict
        with self.eslintrc_path.open("w") as stream:
            yaml.safe_dump(rc, stream)

    def setup(self) -> None:
        self._setup_env()
        needed_packages: Dict[str, Version] = self.ALWAYS_NEEDED.copy()
        project_deps = self._dependencies(location=self.base_path)
        project_has_typescript = self.__uses_typescript(project_deps)
        project_has_react = self.__uses_react(project_deps)
        if project_has_typescript:
            needed_packages.update(self.TYPESCRIPT_PACKAGES)

        self._ensure_packages(needed_packages)
        self._ensure_node_version()

        # install .eslintrc.yml if necessary
        if not self.eslintrc_path.exists():
            logging.info(f"Installing {EslintTool.CONFIG_FILE_NAME}...")

            if project_has_react and project_has_typescript:
                self.__copy_eslintrc("react-and-typescript")
            elif project_has_react:
                self.__copy_eslintrc("react")
            elif project_has_typescript:
                self.__copy_eslintrc("typescript")
            else:
                self.__copy_eslintrc("default")

            self.__add_globals(project_deps)

    @staticmethod
    def raise_failure(cmd: List[str],
                      result: subprocess.CompletedProcess) -> None:
        # Tool returned failure, or did not return json
        raise subprocess.CalledProcessError(result.returncode,
                                            cmd,
                                            output=result.stdout,
                                            stderr=result.stderr)

    def run(self, files: Iterable[str]) -> JsonR:
        disables = [
            arg for d in self.config.get("ignore", [])
            for arg in ["--rule", f"{d}: off"]
        ]
        cmd = [
            "./node_modules/eslint/bin/eslint.js",
            "--no-eslintrc",
            "--no-ignore",
            "-c",
            str(self.eslintrc_path),
            "-f",
            "json",
            "--ext",
            "js,jsx,ts,tsx",
            "--ignore-pattern",
            ".bento/",
            "--ignore-pattern",
            "node_modules/",
        ] + disables
        for f in files:
            cmd.append(os.path.abspath(f))
        result = self.execute(
            cmd,
            cwd=self.install_location,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env={
                "TIMING": "1",
                **os.environ
            },
            check=False,
        )
        logging.debug(f"{self.tool_id()}: stderr:\n" + result.stderr[0:4000])
        logging.debug(f"{self.tool_id()}: stdout:\n" + result.stdout[0:4000])

        # Return codes:
        # 0 = no violations, 1 = violations, 2+ = tool failure
        if result.returncode > 1:
            self.raise_failure(cmd, result)

        try:
            # TODO: this double-parses, which we can avoid in the future by having type-parameterized parsers
            lines = result.stdout.split("\n")
            data = lines[0]
            timing = "\n".join(lines[1:])
            logging.debug(f"r2c.eslint: TIMING:\n{timing}")
            return json.loads(data.strip())
        except Exception as ex:
            logging.error("Could not parse json output of eslint tool", ex)
            self.raise_failure(cmd, result)
            return []  # Unreachable, but mypy is poor
Example #23
 def minimum_client_version(cls) -> Version:
     return Version("2.2.8")
Example #24
def test_should_pull() -> None:
    assert _should_pull_analyzer(
        SpecifiedAnalyzer(
            VersionedAnalyzer(AnalyzerName("doesnt/exist"), Version("9.1.1"))
        )
    )
Example #25
 def version_build(self, value):  
     v = Version(self.version)
     v.build = value
     self.version = str(v)
Example #26
def test_box_metadata_get_add_index(version, version_list, insert_expected, match_expected):
    test_version = parse_version(version)
    test_version_list = [Version(val) for val in version_list]
    insert_at, match_at = get_version_index(test_version, test_version_list)
    assert insert_at == insert_expected
    assert match_at == match_expected
Example #27
 def version_major(self, value):  
     v = Version(self.version)
     v.major = int(value)
     self.version = str(v)
Example #28
def parse_version(version_string):
    """Parse a string into a PackageVersion."""
    try:
        return Version.coerce(version_string)
    except ValueError:
        return None
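Returning None instead of raising makes this safe in filters and sort keys:

    assert str(parse_version('1.2')) == '1.2.0'
    assert parse_version('not a version') is None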
Example #29
 def version_patch(self, value):  
     v = Version(self.version)
     v.patch = int(value)
     self.version = str(v)
Example #30
 def __hash__(self):
     # To be honest, I'm not sure why I had to make this explicit
     return Version.__hash__(self)
Example #31
 def __lt__(self, other):
     if isinstance(other, (str, tuple, list)):
         other = AsdfVersion(other)
     return Version.__lt__(self, other)
Example #32
def find_solc_versions(
    contract_sources: Dict[str, str],
    install_needed: bool = False,
    install_latest: bool = False,
    silent: bool = True,
) -> Dict:
    """
    Analyzes contract pragmas and determines which solc version(s) to use.

    Args:
        contract_sources: a dictionary in the form of {'path': "source code"}
        install_needed: if True, will install when no installed version matches
                        the contract pragma
        install_latest: if True, will install when a newer version is available
                        than the installed one
        silent: set to False to enable verbose reporting

    Returns: dictionary of {'version': ['path', 'path', ..]}
    """

    available_versions, installed_versions = _get_solc_version_list()

    pragma_specs: Dict = {}
    to_install = set()
    new_versions = set()

    for path, source in contract_sources.items():

        pragma_string = next(PRAGMA_REGEX.finditer(source), None)
        if pragma_string is None:
            raise PragmaError(f"No version pragma in '{path}'")
        pragma_specs[path] = NpmSpec(pragma_string.groups()[0])
        version = pragma_specs[path].select(installed_versions)

        if not version and not (install_needed or install_latest):
            raise IncompatibleSolcVersion(
                f"No installed solc version matching '{pragma_string[0]}' in '{path}'"
            )

        # if no installed version of solc matches the pragma, find the latest available version
        latest = pragma_specs[path].select(available_versions)

        if not version and not latest:
            raise IncompatibleSolcVersion(
                f"No installable solc version matching '{pragma_string[0]}' in '{path}'"
            )

        if not version or (install_latest and latest > version):
            to_install.add(latest)
        elif latest and latest > version:
            new_versions.add(str(version))

    # install new versions if needed
    if to_install:
        install_solc(*to_install)
        installed_versions = [
            Version(i[1:]) for i in solcx.get_installed_solc_versions()
        ]
    elif new_versions and not silent:
        print(
            f"New compatible solc version{'s' if len(new_versions) > 1 else ''}"
            f" available: {', '.join(new_versions)}")

    # organize source paths by latest available solc version
    compiler_versions: Dict = {}
    for path, spec in pragma_specs.items():
        version = spec.select(installed_versions)
        compiler_versions.setdefault(str(version), []).append(path)

    return compiler_versions
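For context, NpmSpec implements npm-style range syntax (the form used in Solidity pragmas) on top of semantic_version; a small sketch of the select calls above:

    from semantic_version import NpmSpec, Version

    spec = NpmSpec('^0.6.0')
    installed = [Version('0.5.17'), Version('0.6.2'), Version('0.7.0')]
    assert str(spec.select(installed)) == '0.6.2'  # highest version in range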
Example #33
class TestSMBOpener(unittest.TestCase):
    @unittest.skipIf(utils.FSVERSION <= Version("2.0.7"), 'not supported')
    def test_timeout_parameter(self):
        self.fs = fs.open_fs('smb://*****:*****@127.0.0.1/data?timeout=5')
        self.assertEqual(self.fs.delegate_fs()._timeout, 5)

    @utils.py2expectedFailure
    def test_bad_host(self):
        self.assertRaises(
            fs.errors.CreateFailed,
            fs.open_fs,
            'smb://NONSENSE/?timeout=2',
        )

    def test_bad_ip(self):
        self.assertRaises(
            fs.errors.CreateFailed,
            fs.open_fs,
            'smb://84.190.160.12/?timeout=2',
        )

    @utils.py2expectedFailure
    def test_host(self):
        self.fs = fs.open_fs('smb://*****:*****@SAMBAALPINE/')

    def test_ip(self):
        self.fs = fs.open_fs('smb://*****:*****@127.0.0.1/')

    @mock.patch.object(SMBFS, 'NETBIOS', mock.MagicMock())
    def test_hostname_and_ip(self):
        self.fs = fs.open_fs(
            'smb://*****:*****@127.0.0.1/?hostname=SAMBAALPINE')
        SMBFS.NETBIOS.queryIPforName.assert_not_called()
        SMBFS.NETBIOS.queryName.assert_not_called()

    def test_default_smb_port(self):
        self.fs = fs.open_fs('smb://*****:*****@127.0.0.1/')

        self.assertEqual(self.fs._smb.sock.getpeername()[1], 139)

    def test_explicit_smb_port(self):
        self.fs = fs.open_fs(
            'smb://*****:*****@127.0.0.1:445/?direct-tcp=True')

        self.assertEqual(self.fs._smb.sock.getpeername()[1], 445)

    def test_create(self):

        directory = "data/test/directory"
        base = "smb://*****:*****@127.0.0.1"
        url = "{}/{}".format(base, directory)

        # Make sure a nonexistent directory raises `CreateFailed`
        with self.assertRaises(fs.errors.CreateFailed):
            smb_fs = fs.open_fs(url)

        # Open with `create` and try touching a file
        with fs.open_fs(url, create=True) as smb_fs:
            smb_fs.touch("foo")

        # Open the base filesystem and check the subdirectory exists
        with fs.open_fs(base) as smb_fs:
            self.assertTrue(smb_fs.isdir(directory))
            self.assertTrue(smb_fs.isfile(fs.path.join(directory, "foo")))

        # Open without `create` and check the file exists
        with fs.open_fs(url) as smb_fs:
            self.assertTrue(smb_fs.isfile("foo"))

        # Open with `create` again and check this does not fail
        with fs.open_fs(url, create=True) as smb_fs:
            self.assertTrue(smb_fs.isfile("foo"))
Example #34
def device(name, *args, **kwargs):
    r"""device(name, wires=1, *args, **kwargs)
    Load a :class:`~.Device` and return the instance.

    This function is used to load a particular quantum device,
    which can then be used to construct QNodes.

    PennyLane comes with support for the following devices:

    * :mod:`'default.qubit' <pennylane.devices.default_qubit>`: a simple
      state simulator of qubit-based quantum circuit architectures.

    * :mod:`'default.gaussian' <pennylane.devices.default_gaussian>`: a simple simulator
      of Gaussian states and operations on continuous-variable circuit architectures.

    * :mod:`'default.qubit.tf' <pennylane.devices.default_qubit_tf>`: a state simulator
      of qubit-based quantum circuit architectures written in TensorFlow, which allows
      automatic differentiation through the simulation.

    * :mod:`'default.qubit.autograd' <pennylane.devices.default_qubit_autograd>`: a state simulator
      of qubit-based quantum circuit architectures which allows
      automatic differentiation through the simulation via python's autograd library.

    Additional devices are supported through plugins — see
    the  `available plugins <https://pennylane.ai/plugins.html>`_ for more
    details.

    All devices must be loaded by specifying their **short-name** as listed above,
    followed by the **wires** (subsystems) you wish to initialize. The *wires*
    argument can be an integer, in which case the wires of the device are addressed
    by consecutive integers:

    .. code-block:: python

        dev = qml.device('default.qubit', wires=5)

        def circuit():
           qml.Hadamard(wires=1)
           qml.Hadamard(wires=[0])
           qml.CNOT(wires=[3, 4])
           ...

    The *wires* argument can also be a sequence of unique numbers or strings, specifying custom wire labels
    that the user employs to address the wires:

    .. code-block:: python

        dev = qml.device('default.qubit', wires=['ancilla', 'q11', 'q12', -1, 1])

        def circuit():
           qml.Hadamard(wires='q11')
           qml.Hadamard(wires=['ancilla'])
           qml.CNOT(wires=['q12', -1] )
           ...

    Most devices accept a ``shots`` argument which specifies how many circuit executions
    are used to estimate stochastic return values. In particular, ``qml.sample()`` measurements
    will return as many samples as specified in the shots argument. The shots argument can be
    changed on a per-call basis using the built-in ``shots`` keyword argument.

    .. code-block:: python

        dev = qml.device('default.qubit', wires=1, shots=10)

        @qml.qnode(dev)
        def circuit(a):
          qml.RX(a, wires=0)
          return qml.sample(qml.PauliZ(wires=0))

    >>> circuit(0.8)  # 10 samples are returned
    [ 1  1  1 -1 -1  1  1  1  1  1]
    >>> circuit(0.8, shots=3)  # default is overwritten for this call
    [1 1 1]
    >>> circuit(0.8)  # back to default of 10 samples
    [ 1  1  1 -1 -1  1  1  1  1  1]

    Some devices may accept additional arguments. For instance,
    ``default.gaussian`` accepts the keyword argument ``hbar``, to set
    the convention used in the commutation relation :math:`[\x,\p]=i\hbar`
    (by default set to 2).

    Please refer to the documentation for the individual devices to see any
    additional arguments that might be required or supported.

    Args:
        name (str): the name of the device to load
        wires (int): the number of wires (subsystems) to initialise
            the device with

    Keyword Args:
        config (pennylane.Configuration): a PennyLane configuration object
            that contains global and/or device specific configurations.
    """
    if name not in plugin_devices:
        # Device does not exist in the loaded device list.
        # Attempt to refresh the devices, in case the user
        # installed the plugin during the current Python session.
        refresh_devices()

    if name in plugin_devices:
        options = {}

        # load global configuration settings if available
        config = kwargs.get("config", default_config)

        if config:
            # combine configuration options with keyword arguments.
            # Keyword arguments take preference, followed by device options,
            # followed by plugin options, followed by global options.
            options.update(config["main"])
            options.update(config[name.split(".")[0] + ".global"])
            options.update(config[name])

        kwargs.pop("config", None)
        options.update(kwargs)

        # loads the device class
        plugin_device_class = plugin_devices[name].load()

        if Version(version()) not in Spec(
                plugin_device_class.pennylane_requires):
            raise DeviceError(
                "The {} plugin requires PennyLane versions {}, however PennyLane "
                "version {} is installed.".format(
                    name, plugin_device_class.pennylane_requires, __version__))

        # load device
        return plugin_device_class(*args, **options)

    raise DeviceError(
        "Device does not exist. Make sure the required plugin is installed.")
Example #35
def semantic_version_key(file_data):
    return Version.coerce(make_safe_version(file_data['version']))
Example #36
 def __init__(self, name, version):
     self.name = name
     self.version_string = version
     self.version = Version.coerce(version)
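Keeping both the raw string and the coerced Version is deliberate: coercion can rewrite the text, e.g. extra components are folded into build metadata:

    from semantic_version import Version

    assert str(Version.coerce('1.2.3.4')) == '1.2.3+4'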
Example #37
#!/usr/bin/env python3

import subprocess
from semantic_version import Version

r = subprocess.check_output("zypper se -s 'kernel-default*'",
                            shell=True,
                            encoding='utf8')

remove = []

for line in r.split('\n'):
    tokens = [x.strip() for x in line.split('|')]
    if len(tokens) != 6:
        continue

    if tokens[1] == 'kernel-default':
        remove.append(tokens[3])

remove = sorted(remove, key=Version)
last = remove[-1]
remove = [x for x in remove if x != last]

print('zypper rm ', end='')
for r in remove:
    print('kernel-default-' + r + ' ', end='')
print()
Example #38
__all__ = ['magics', 'mongonbmanager', 'ws_util', 'common', 'kbasewsmanager']

from semantic_version import Version
__version__ = Version("0.2.0")
version = lambda: __version__
Example #39
    def generate_cluster_info(self):
        """
        Generates the cluster information file
        """
        logger.info("Generating cluster information file")

        # get kubeconfig and upload to httpd server
        kubeconfig = os.path.join(self.cluster_path,
                                  config.RUN.get('kubeconfig_location'))
        remote_path = os.path.join(config.ENV_DATA.get('path_to_upload'),
                                   f"{config.RUN.get('run_id')}_kubeconfig")
        upload_file(config.ENV_DATA.get('httpd_server'), kubeconfig,
                    remote_path, config.ENV_DATA.get('httpd_server_user'),
                    config.ENV_DATA.get('httpd_server_password'))

        #  Form the kubeconfig url path (strip the document-root prefix;
        #  str.lstrip strips a character set, not a prefix, so avoid it here)
        kubeconfig_url_path = os.path.join(
            'http://', config.ENV_DATA.get('httpd_server'),
            remote_path.replace('/var/www/html/', '', 1))
        config.ENV_DATA['kubeconfig_url'] = kubeconfig_url_path

        # get the infra_id
        infra_id = get_infra_id(self.cluster_path)
        config.ENV_DATA['infra_id'] = infra_id

        # get the cluster id
        cluster_id = get_cluster_id(self.cluster_path)
        config.ENV_DATA['cluster_id'] = cluster_id

        # fetch the installer version
        installer_version_str = run_cmd(
            f"{config.RUN['bin_dir']}/openshift-install version")
        installer_version = installer_version_str.split()[1]
        config.ENV_DATA['installer_version'] = installer_version

        # get the major and minor version of OCP
        version_obj = Version(installer_version)
        ocp_version_x = version_obj.major
        ocp_version_y = version_obj.minor
        config.ENV_DATA['ocp_version_x'] = ocp_version_x
        config.ENV_DATA['ocp_version_y'] = ocp_version_y

        # generate the cluster info yaml file
        terraform_var_template = "cluster_info.yaml.j2"
        terraform_var_template_path = os.path.join("ocp-deployment",
                                                   terraform_var_template)
        terraform_config_str = self._templating.render_template(
            terraform_var_template_path, config.ENV_DATA)
        terraform_var_yaml = os.path.join(self.cluster_path,
                                          constants.TERRAFORM_DATA_DIR,
                                          constants.SCALEUP_TERRAFORM_DATA_DIR,
                                          "cluster_info.yaml")

        with open(terraform_var_yaml, "w") as f:
            f.write(terraform_config_str)

        # config.ENV_DATA['dns_server'] = config.ENV_DATA['dns']
        template_vars = (f"\"dns_server: {config.ENV_DATA['dns']}"
                         f"\\nremove_rhcos_worker: 'yes'\\n\"")

        replace_content_in_file(terraform_var_yaml, "PLACEHOLDER",
                                template_vars)
        logger.info(f"cluster yaml file: {terraform_var_yaml}")
Example #40
def parse_version(string):
    if string[0] == 'v':
        return Version.coerce(string[1:])
    return Version(string)
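Usage sketch of the v-prefix handling:

    assert str(parse_version('v1.2')) == '1.2.0'   # prefixed strings are coerced
    assert str(parse_version('1.2.3')) == '1.2.3'  # bare strings must be strict SemVer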
Example #41
def inc_patch(version):
    v = Version(str(version))
    v.patch = v.patch + 1
    return v
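semantic_version can also do this without mutation via next_patch():

    from semantic_version import Version

    assert str(inc_patch(Version('1.2.3'))) == '1.2.4'
    assert Version('1.2.3').next_patch() == Version('1.2.4')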
Example #42
from semantic_version import Version

CURRENT_VERSION = Version("4.0.0")
MIN_SUPPORTED_VERSION = Version("4.0.0")
MAX_SUPPORTED_VERSION = Version("4.0.0")
Example #43
from __future__ import annotations

from typing import Any, Optional, Union

from semantic_version import Version

VERSION = Version("1.2.2")

RELEASES_RSS_URL = "https://pypi.org/rss/project/ideaseed/releases.xml"

COLOR_NAME_TO_HEX_MAP: dict[str, str] = {
    "Blue": "AECBFA",
    "Brown": "E6C9A8",
    "DarkBlue": "AECBFA",
    "Gray": "E8EAED",
    "Green": "CCFF90",
    "Orange": "FBBC04",
    "Pink": "FDCFE8",
    "Purple": "D7AEFB",
    "Red": "F28B82",
    "Teal": "A7FFEB",
    "White": "FFFFFF",
    "Yellow": "FFF475",
}

COLOR_ALIASES = {
    "Cyan": "Teal",
    "Indigo": "DarkBlue",
    "Grey": "Gray",
    "Magenta": "Purple",
}
Example #44
import logging
from typing import Union

import vvm
import vyper
from semantic_version import Version
from vyper.exceptions import VyperException

from brownie.exceptions import CompilerError, IncompatibleVyperVersion
from brownie.project import sources
from brownie.project.compiler.utils import expand_source_map
from brownie.project.sources import is_inside_offset

vvm_logger = logging.getLogger("vvm")
vvm_logger.setLevel(10)
sh = logging.StreamHandler()
sh.setLevel(10)
sh.setFormatter(logging.Formatter("%(message)s"))
vvm_logger.addHandler(sh)

AVAILABLE_VYPER_VERSIONS = None
_active_version = Version(vyper.__version__)


def get_version() -> Version:
    return _active_version


def set_vyper_version(version: Union[str, Version]) -> str:
    """Sets the vyper version. If not available it will be installed."""
    global _active_version
    if isinstance(version, str):
        version = Version(version)
    if version != Version(vyper.__version__):
        try:
            vvm.set_vyper_version(version, silent=True)
        except vvm.exceptions.VyperNotInstalled:
Example #45
def device(name, *args, **kwargs):
    r"""device(name, wires=1, *args, **kwargs)
    Load a plugin :class:`~.Device` and return the instance.

    This function is used to load a particular quantum device,
    which can then be used to construct QNodes.

    PennyLane comes with support for the following two devices:

    * :mod:`'default.qubit' <pennylane.plugins.default_qubit>`: a simple pure
      state simulator of qubit-based quantum circuit architectures.

    * :mod:`'default.gaussian' <pennylane.plugins.default_gaussian>`: a simple simulator
      of Gaussian states and operations on continuous-variable circuit architectures.

    Additional devices are supported through plugins — see
    the `available plugins <https://pennylane.ai/plugins.html>`_ for more
    details.

    All devices must be loaded by specifying their **short-name** as listed above,
    followed by the number of *wires* (subsystems) you wish to initialize.

    Some devices may accept additional arguments. For instance,
    ``default.gaussian`` accepts the keyword argument ``hbar``, to set
    the convention used in the commutation relation :math:`[\x,\p]=i\hbar`
    (by default set to 2).

    Please refer to the documentation for the individual devices to see any
    additional arguments that might be required or supported.

    Args:
        name (str): the name of the device to load
        wires (int): the number of wires (subsystems) to initialise
            the device with

    Keyword Args:
        config (pennylane.Configuration): a PennyLane configuration object
            that contains global and/or device specific configurations.
    """
    if name in plugin_devices:
        options = {}

        # load global configuration settings if available
        config = kwargs.get("config", default_config)

        if config:
            # combine configuration options with keyword arguments.
            # Keyword arguments take preference, followed by device options,
            # followed by plugin options, followed by global options.
            options.update(config["main"])
            options.update(config[name.split(".")[0] + ".global"])
            options.update(config[name])

        kwargs.pop("config", None)
        options.update(kwargs)

        # loads the plugin device class
        plugin_device_class = plugin_devices[name].load()

        if Version(version()) not in Spec(plugin_device_class.pennylane_requires):
            raise DeviceError(
                "The {} plugin requires PennyLane versions {}, however PennyLane "
                "version {} is installed.".format(
                    name, plugin_device_class.pennylane_requires, __version__
                )
            )

        # load plugin device
        return plugin_device_class(*args, **options)

    raise DeviceError(
        "Device does not exist. Make sure the required plugin is installed."
    )
Example #46
    def __init__(self, user, repository, client):
        self.user = user
        self.name = repository
        self.contributors = False
        self.collaborators = {}
        self.client = client
        self.client.set_user_agent('gitconsensus')
        self.repository = self.client.repository(self.user, self.name)
        consensusurl = consensus_url_template % (self.user, self.name)
        res = githubApiRequest(consensusurl, self.client)
        self.rules = False
        if res.status_code == 200:
            ruleresults = res.json()
            self.rules = yaml.safe_load(
                base64.b64decode(ruleresults['content']).decode('utf-8'))
            # support older versions by converting from day to hours.
            if 'version' not in self.rules or self.rules['version'] < 2:
                if 'mergedelay' in self.rules and self.rules['mergedelay']:
                    self.rules['mergedelay'] = self.rules['mergedelay'] * 24
                if 'timeout' in self.rules and self.rules['timeout']:
                    self.rules['timeout'] = self.rules['timeout'] * 24
                self.rules['version'] = 2

            if self.rules['version'] < 3:
                self.rules['version'] = 3
                self.rules['pull_requests'] = {
                    "quorum":
                    self.rules.get('quorum', False),
                    "threshold":
                    self.rules.get('threshold', False),
                    "contributors_only":
                    self.rules.get('contributorsonly', False),
                    "collaborators_only":
                    self.rules.get('collaboratorsonly', False),
                    "whitelist":
                    self.rules.get('whitelist'),
                    "blacklist":
                    self.rules.get('blacklist'),
                    "merge_delay":
                    self.rules.get('mergedelay', False),
                    "delay_override":
                    self.rules.get('delayoverride', False),
                    "merge_delay_min":
                    self.rules.get('mergedelaymin', False),
                    "license_delay":
                    self.rules.get('licenseddelay', False),
                    "license_lock":
                    self.rules.get('locklicense', False),
                    "consensus_delay":
                    self.rules.get('consensusdelay', False),
                    "consensus_lock":
                    self.rules.get('lockconsensus', False),
                    "timeout":
                    self.rules.get('timeout')
                }

            if int(self.rules['pull_requests']['threshold']) > 1:
                self.rules['pull_requests']['threshold'] /= 100

            # Treat higher version consensus rules are an unconfigured repository.
            project_consensus_version = Version(str(self.rules['version']),
                                                partial=True)
            if max_consensus_version < project_consensus_version:
                self.rules = False
Example #47
def _check_version(version):
    version = Version(version.lstrip("v"))
    if version not in SimpleSpec(">=0.4.11"):
        raise ValueError("py-solc-x does not support solc versions <0.4.11")
    return "v" + str(version)
Example #48
        def deploy(self, log_cli_level='DEBUG'):
            """
            Deployment specific to OCP cluster on this platform

            Args:
                log_cli_level (str): openshift installer's log level
                    (default: "DEBUG")

            """
            logger.info("Deploying OCP cluster for vSphere platform")
            logger.info(
                f"Openshift-installer will be using loglevel:{log_cli_level}"
            )
            os.chdir(self.terraform_data_dir)
            self.terraform.initialize()
            self.terraform.apply(self.terraform_var)
            os.chdir(self.previous_dir)
            logger.info("waiting for bootstrap to complete")
            try:
                run_cmd(
                    f"{self.installer} wait-for bootstrap-complete "
                    f"--dir {self.cluster_path} "
                    f"--log-level {log_cli_level}",
                    timeout=3600
                )
            except CommandFailed as e:
                if constants.GATHER_BOOTSTRAP_PATTERN in str(e):
                    try:
                        gather_bootstrap()
                    except Exception as ex:
                        logger.error(ex)
                raise e

            if not config.DEPLOYMENT['preserve_bootstrap_node']:
                logger.info("removing bootstrap node")
                os.chdir(self.terraform_data_dir)
                self.terraform.apply(
                    self.terraform_var, bootstrap_complete=True
                )
                os.chdir(self.previous_dir)

            OCP.set_kubeconfig(self.kubeconfig)

            approve_pending_csr()

            # wait for image registry to show-up
            co = "image-registry"
            wait_for_co(co)

            # patch image registry to null
            self.configure_storage_for_image_registry(self.kubeconfig)

            # wait for install to complete
            logger.info("waiting for install to complete")
            run_cmd(
                f"{self.installer} wait-for install-complete "
                f"--dir {self.cluster_path} "
                f"--log-level {log_cli_level}",
                timeout=1800
            )

            # wait for all nodes to generate CSR
            # From OCP version 4.4 and above, we have to approve CSR manually
            # for all the nodes
            ocp_version = get_ocp_version()
            if Version.coerce(ocp_version) >= Version.coerce('4.4'):
                wait_for_all_nodes_csr()

            approve_pending_csr()
            self.test_cluster()
Example #49
def get_pyethereum_version():
    try:
        return Version(pkg_resources.get_distribution("ethereum").version)
    except pkg_resources.DistributionNotFound:
        return None
Example #50
@pytest.mark.parametrize(
    'version_str, expected',
    (
        ('', None),
        ('a', None),
        ('0', '0.0.0'),
        ('0.1', '0.1.0'),
        ('0.0.1', '0.0.1'),
        ('01.02.03', '1.2.3'),
        (1, '1.0.0'),
        (1.02, '1.2.0'),
        ('1.2.3.4', None),
        ('1.2.3-', None),
        ('1.2.3-4', None),
        ('1.2.3_4', None),
        ('1.2.3+4', None),
        (Version('1.2.3'), '1.2.3'),
        (Version('1.2.3-4'), None),
        (Version('1.2', partial = True), None),
    )
)
def test_box_metadata_version(version_str, expected):
    if expected is not None:
        assert str(parse_version(version_str)) == expected

    else:
        with pytest.raises(BoxVersionException):
            parse_version(version_str)


@pytest.mark.parametrize(
    'url, expected',
Example #51
def test_version():
    assert compiler.vyper.get_version() == Version.coerce(vyper.__version__)
Example #52
def parse_versioned_name(name):
    if '@' not in name:
        return name, None
    name, version = name.split('@', 1)
    return name, Version.coerce(version)
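Usage sketch:

    assert parse_versioned_name('eslint') == ('eslint', None)
    name, ver = parse_versioned_name('eslint@6.1')
    assert (name, str(ver)) == ('eslint', '6.1.0')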
Example #53
import json
import os
import posixpath
import sys
import warnings
from typing import Any, Dict, IO, Iterator, List, Optional, Union

from jsonschema import Draft4Validator, RefResolver, ValidationError
from pkg_resources import resource_filename
from semantic_version import Version
from slicedimage import VERSIONS as SLICEDIMAGE_VERSIONS

from starfish.core.codebook._format import CURRENT_VERSION as CODEBOOK_CURRENT_VERSION
from starfish.core.experiment.version import CURRENT_VERSION as EXPERIMENT_CURRENT_VERSION

TILESET_CURRENT_VERSION = Version(SLICEDIMAGE_VERSIONS[-1].VERSION)
package_name = 'starfish'


def _get_absolute_schema_path(schema_name: str) -> str:
    """turn the name of the schema into an absolute path by joining it to <package_root>/schema."""
    return resource_filename(
        "starfish", posixpath.join("spacetx_format", "schema", schema_name))


class SpaceTxValidator:
    def __init__(self, schema: str) -> None:
        """create a validator for a json-schema compliant spaceTx specification file

        Parameters
        ----------
Example #54
def compile_and_format(
    contract_sources: Dict[str, str],
    solc_version: Optional[str] = None,
    vyper_version: Optional[str] = None,
    optimize: bool = True,
    runs: int = 200,
    evm_version: Optional[str] = None,
    silent: bool = True,
    allow_paths: Optional[str] = None,
    interface_sources: Optional[Dict[str, str]] = None,
    remappings: Optional[list] = None,
    optimizer: Optional[Dict] = None,
) -> Dict:
    """Compiles contracts and returns build data.

    Args:
        contract_sources: a dictionary in the form of {'path': "source code"}
        solc_version: solc version to compile with (use None to set via pragmas)
        vyper_version: vyper version to compile with (use None to set via pragmas)
        optimize: (deprecated) enable solc optimizer
        runs: (deprecated) optimizer runs
        evm_version: evm version to compile for
        silent: disable verbose reporting
        allow_paths: compiler allowed filesystem import path
        interface_sources: dictionary of interfaces as {'path': "source code"}
        remappings: list of solidity path remappings
        optimizer: dictionary of solidity optimizer settings

    Returns:
        build data dict
    """
    if not contract_sources:
        return {}
    if interface_sources is None:
        interface_sources = {}

    if [i for i in contract_sources if Path(i).suffix not in (".sol", ".vy")]:
        raise UnsupportedLanguage(
            "Source suffixes must be one of ('.sol', '.vy')")
    if [
            i for i in interface_sources
            if Path(i).suffix not in (".sol", ".vy", ".json")
    ]:
        raise UnsupportedLanguage(
            "Interface suffixes must be one of ('.sol', '.vy', '.json')")

    build_json: Dict = {}
    compiler_targets = {}

    vyper_sources = {
        k: v
        for k, v in contract_sources.items() if Path(k).suffix == ".vy"
    }
    if vyper_sources:
        # TODO support setting `vyper_version` through the config file
        # (the input arg already exists)
        if vyper_version is None:
            compiler_targets.update(
                find_vyper_versions(vyper_sources,
                                    install_needed=True,
                                    silent=silent))
        else:
            compiler_targets[vyper_version] = list(vyper_sources)
    solc_sources = {
        k: v
        for k, v in contract_sources.items() if Path(k).suffix == ".sol"
    }
    if solc_sources:
        if solc_version is None:
            compiler_targets.update(
                find_solc_versions(solc_sources,
                                   install_needed=True,
                                   silent=silent))
        else:
            compiler_targets[solc_version] = list(solc_sources)

        if optimizer is None:
            optimizer = {"enabled": optimize, "runs": runs if optimize else 0}

    for version, path_list in compiler_targets.items():
        compiler_data: Dict = {}
        if path_list[0].endswith(".vy"):
            set_vyper_version(version)
            language = "Vyper"
            compiler_data["version"] = str(vyper.get_version())
            interfaces = {
                k: v
                for k, v in interface_sources.items()
                if Path(k).suffix != ".sol"
            }
        else:
            set_solc_version(version)
            language = "Solidity"
            compiler_data["version"] = str(solidity.get_version())
            interfaces = {
                k: v
                for k, v in interface_sources.items()
                if Path(k).suffix == ".sol"
                and Version(version) in sources.get_pragma_spec(v, k)
            }

        to_compile = {
            k: v
            for k, v in contract_sources.items() if k in path_list
        }

        input_json = generate_input_json(
            to_compile,
            evm_version=evm_version,
            language=language,
            interface_sources=interfaces,
            remappings=remappings,
            optimizer=optimizer,
        )

        output_json = compile_from_input_json(input_json, silent, allow_paths)
        build_json.update(
            generate_build_json(input_json, output_json, compiler_data,
                                silent))

    return build_json
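
A minimal usage sketch, assuming a brownie-style environment where the helpers above are importable; the contract source and pinned solc version are made up:

sources = {"contracts/Token.sol": "pragma solidity ^0.8.0;\ncontract Token {}"}
build = compile_and_format(sources, solc_version="0.8.17", silent=False)
print(sorted(build))  # one build entry per compiled contract
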
Example #55
0
class ReleaseTest:

    st_bool = strategy("bool")

    def __init__(self, gov, registry, create_token, create_vault):
        self.gov = gov
        self.registry = registry
        # functions stored as instance attributes are not bound, so the
        # adaptors below must not take a `self` parameter
        self.create_token = lambda: create_token()

        def create_vault_adaptor(*args, **kwargs):
            return create_vault(*args, **kwargs)

        self.create_vault = create_vault_adaptor

    def setup(self):
        self.latest_version = Version("1.0.0")
        token = self.create_token()
        vault = self.create_vault(token, version=str(self.latest_version))
        self.vaults = {token: [vault]}
        self.registry.newRelease(vault, {"from": self.gov})
        self.experiments = {}

    def rule_new_release(self, new_token="st_bool"):
        if new_token or len(self.vaults.keys()) == 0:
            token = self.create_token()
        else:
            token = list(self.vaults.keys())[-1]

        self.latest_version = self.latest_version.next_patch()

        vault = self.create_vault(token, version=str(self.latest_version))
        print(f"Registry.newRelease({token}, {self.latest_version})")
        self.registry.newRelease(vault, {"from": self.gov})

        if token in self.vaults:
            self.vaults[token].append(vault)
        else:
            self.vaults[token] = [vault]

    def rule_new_deployment(self, new_token="st_bool"):
        tokens_with_stale_deployments = [
            token for token, deployments in self.vaults.items()
            if Version(deployments[-1].apiVersion()) < self.latest_version
        ]
        if new_token or len(tokens_with_stale_deployments) == 0:
            token = self.create_token()
        else:
            token = tokens_with_stale_deployments[-1]

        print(f"Registry.newVault({token}, {self.latest_version})")
        vault = Vault.at(
            self.registry.newVault(token, self.gov, self.gov, "",
                                   "").return_value)

        if token in self.vaults:
            self.vaults[token].append(vault)
        else:
            self.vaults[token] = [vault]

    def rule_new_experiment(self):
        token = self.create_token()
        print(f"Registry.newExperimentalVault({token}, {self.latest_version})")

        vault = Vault.at(
            self.registry.newExperimentalVault(token, self.gov, self.gov,
                                               self.gov, "", "").return_value)

        self.experiments[token] = [vault]

    def rule_endorse_experiment(self):
        experiments_with_latest_api = [
            (token, deployments[-1])
            for token, deployments in self.experiments.items()
            if (Version(deployments[-1].apiVersion()) == self.latest_version
                and (token not in self.vaults
                     or Version(self.vaults[token][-1].apiVersion()) < Version(
                         deployments[-1].apiVersion())))
        ]
        if len(experiments_with_latest_api) > 0:
            token, vault = experiments_with_latest_api[-1]
            print(f"Registry.endorseVault({token}, {self.latest_version})")
            self.registry.endorseVault(vault, {"from": self.gov})

            if token in self.vaults:
                self.vaults[token].append(vault)
            else:
                self.vaults[token] = [vault]

    def invariant(self):
        for token, deployments in self.vaults.items():
            # Check that token matches up
            assert deployments[0].token() == token
            # Strictly linearly increasing versions
            last_version = Version(deployments[0].apiVersion())
            assert last_version <= self.latest_version

            for vault in deployments[1:]:
                # Check that token matches up
                assert vault.token() == token
                # Strictly linearly increasing versions: track the previous
                # deployment's version so each step is compared to its neighbor
                version = Version(vault.apiVersion())
                assert last_version < version <= self.latest_version
                last_version = version
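
The invariant boils down to strict monotonicity of semantic versions; a self-contained sketch of the same property, using only semantic_version:

from semantic_version import Version

versions = [Version("1.0.0"), Version("1.0.1"), Version("1.1.0")]
assert all(a < b for a, b in zip(versions, versions[1:]))
# next_patch() is what rule_new_release uses to advance the latest version
assert versions[0].next_patch() == Version("1.0.1")
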
Example #56
0
from setuptools import setup, find_packages
from semantic_version import Version
"""
To release package update `package_version` and commit/push changes, then run
$ python setup.py sdist bdist_wheel
$ twine upload dist/*

for twine upload to work, must have credentials in .pypirc file
"""

with open('README.md', 'r') as fh:
    long_description = fh.read()

package_version = Version(major=0, minor=1, patch=0, prerelease=('alpha', '1'))
# package_version = Version('0.5.0')  # Use this when installing locally with pip install -e

setup(
    name='capanno_utils',
    version=str(package_version),
    packages=find_packages(),
    url='https://github.com/truwl/capanno-utils',
    license='Apache 2.0',
    author='Karl Sebby',
    author_email='*****@*****.**',
    description='Tool for managing bioinformatics content repositories.',
    long_description=long_description,
    long_description_content_type="text/markdown",
    install_requires=[
        'setuptools',
        'requests',
        'ruamel.yaml >= 0.15, <=0.16',
Example #57
0
    def test_rgw_kafka_notifications(self, bucket_factory):
        """
        Test to verify rgw kafka notifications

        """
        # Get sc
        sc = default_storage_class(interface_type=constants.CEPHBLOCKPOOL)

        # Deploy amq cluster
        self.amq.setup_amq_cluster(sc.name)

        # Create topic
        self.kafka_topic = self.amq.create_kafka_topic()

        # Create Kafkadrop pod
        (
            self.kafkadrop_pod,
            self.kafkadrop_svc,
            self.kafkadrop_route,
        ) = self.amq.create_kafkadrop()

        # Get the kafkadrop route
        kafkadrop_host = self.kafkadrop_route.get().get("spec").get("host")

        # Create bucket
        bucketname = bucket_factory(amount=1, interface="RGW-OC")[0].name

        # Get RGW credentials (only the endpoint is used here; the bucket's
        # own credentials come from the OBC below)
        rgw_obj = RGW()
        rgw_endpoint, access_key, secret_key = rgw_obj.get_credentials()

        # Clone notify repo
        notify_path = clone_notify()

        # Initialise an S3 resource to put objects
        data = "A random string data to write on created rgw bucket"
        obc_obj = OBC(bucketname)
        s3_resource = boto3.resource(
            "s3",
            verify=retrieve_verification_mode(),
            endpoint_url=rgw_endpoint,
            aws_access_key_id=obc_obj.access_key_id,
            aws_secret_access_key=obc_obj.access_key,
        )
        s3_client = s3_resource.meta.client

        # Initialize notify command to run
        notify_cmd = (
            f"python {notify_path} -e {rgw_endpoint} -a {obc_obj.access_key_id} "
            f"-s {obc_obj.access_key} -b {bucketname} -ke {constants.KAFKA_ENDPOINT} -t {self.kafka_topic.name}"
        )
        log.info(f"Running cmd {notify_cmd}")

        # Put objects to bucket
        assert s3_client.put_object(Bucket=bucketname, Key="key-1",
                                    Body=data), "Failed: Put object: key-1"
        exec_cmd(notify_cmd)

        # Validate from the rgw logs that notifications are sent
        # and no errors are seen
        pattern = "ERROR: failed to create push endpoint"
        rgw_pod_obj = get_rgw_pods()
        rgw_log = get_pod_logs(pod_name=rgw_pod_obj[0].name, container="rgw")
        assert re.search(pattern=pattern, string=rgw_log) is None, (
            f"Error: '{pattern}' found in the rgw logs. "
            f"Check the rgw logs to confirm whether the push endpoint was "
            f"created and rgw bucket notifications are working correctly")
        assert s3_client.put_object(Bucket=bucketname, Key="key-2",
                                    Body=data), "Failed: Put object: key-2"
        exec_cmd(notify_cmd)

        # Validate that messages are received on the Kafka side using curl
        # (a temporary check from the Kafka side; ideally verified from the UI)
        curl_command = (
            f"curl -X GET {kafkadrop_host}/topic/{self.kafka_topic.name} "
            "-H 'content-type: application/vnd.kafka.json.v2+json'")
        json_output = run_cmd(cmd=curl_command)
        new_string = json_output.split()
        messages = new_string[new_string.index("messages</td>") + 1]
        if messages.find("1") == -1:
            raise Exception(
                "Error: Messages were not received on the Kafka side. "
                "RGW bucket notification is not working as expected.")

        # Validate the timestamp events
        ocs_version = config.ENV_DATA["ocs_version"]
        if Version.coerce(ocs_version) >= Version.coerce("4.8"):
            cmd = (
                f"bin/kafka-console-consumer.sh --bootstrap-server {constants.KAFKA_ENDPOINT} "
                f"--topic {self.kafka_topic.name} --from-beginning --timeout-ms 20000"
            )
            pod_list = get_pod_name_by_pattern(
                pattern="my-cluster-zookeeper",
                namespace=constants.AMQ_NAMESPACE)
            zookeeper_obj = get_pod_obj(name=pod_list[0],
                                        namespace=constants.AMQ_NAMESPACE)
            event_obj = zookeeper_obj.exec_cmd_on_pod(command=cmd)
            log.info(f"Event obj: {event_obj}")
            event_time = event_obj.get("Records")[0].get("eventTime")
            format_string = "%Y-%m-%dT%H:%M:%S.%fZ"
            try:
                datetime.strptime(event_time, format_string)
            except ValueError as ef:
                log.error(
                    f"Timestamp event {event_time} doesn't match the pattern {format_string}"
                )
                raise ef

            log.info(
                f"Timestamp event {event_time} matches the pattern {format_string}"
            )
Example #58
0
import os
import sys
import json
from semantic_version import Version

sys.path.insert(0, os.path.abspath('../..'))

with open('../../setup.json') as f:
    setup_data = json.load(f)

project = 'configpp'
copyright = setup_data["author"]
author = setup_data["author"]

semver = Version(setup_data["version"])

# The short X.Y version
version = "{major}.{minor}".format(**semver.__dict__)
# The full version, including alpha/beta/rc tags.
release = setup_data["version"]

# -- General configuration ---------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
Example #59
0
def parse_version(version_string):
    """Parse a string into a PackageVersion."""
    try:
        return Version.coerce(version_string)
    except Exception:
        return None
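
A brief illustration of the fallback behaviour, assuming semantic_version.Version is imported (the inputs are made up):

assert parse_version("1.2.3") == Version("1.2.3")
assert parse_version("not a version") is None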
Example #60
0
import base64
import datetime
import github3
import json
import requests
from semantic_version import Version
import yaml

# .gitconsensus.yaml files with versions higher than this will be ignored.
max_consensus_version = Version('3.0.0', partial=True)

message_template = """
This Pull Request has been %s by [GitConsensus](https://www.gitconsensus.com/).

## Vote Totals

| Yes | No | Abstain | Voters |
| --- | -- | ------- | ------ |
| %s  | %s | %s      | %s     |


## Vote Breakdown

%s


## Vote Results

| Criteria   | Result |
| ---------- | ------ |
| Has Quorum | %s     |