Code example #1
def get_version():

    from idigbio_media_appliance.version import VERSION
    version = VERSION

    for f in ["package.json", "bower.json"]:
        with io.open(f, "r") as jf:
            config = json.load(jf)

        vs = config["version"]
        if version is None:
            version = vs
        elif semver.compare(vs, version) > 0:
            if options["human"]:
                click.echo("{} had larger version. {} > {}".format(f, vs, version))
            version = vs

    for f in ["meta.yaml", "construct.yaml"]:
        with io.open(f, "r") as yf:
            config = yaml.load(yf)

        if "package" in config:
            vs = config["package"]["version"]
        else:
            vs = config["version"]

        if version is None:
            version = vs
        elif semver.compare(vs, version) > 0:
            if options["human"]:
                click.echo("{} had larger version. {} > {}".format(f, vs, version))
            version = vs

    return version
Code example #2
File: commands.py Project: hamon-in/invoice
 def info(self):
     sess = model.get_session(self.args['db'])
     db_version = sess.query(model.Config).filter(model.Config.name == "version").one().value
     print("Database version %s" % db_version)
     print("Software version %s" % __version__)
     if semver.compare(db_version, __version__) == -1:
         self.l.info("Database older than software. Consider updating.")
     elif semver.compare(db_version, __version__) == 1:
         self.l.info("Database newer than software. Some operations will not be possible.")
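The checks against -1 and 1 above rely on the signed result of semver.compare; a minimal standalone sketch of that contract (not part of the project above):

import semver

# semver.compare(a, b) returns -1 if a < b, 0 if a == b, and 1 if a > b
assert semver.compare("1.2.0", "1.3.0") == -1
assert semver.compare("1.3.0", "1.3.0") == 0
assert semver.compare("1.3.1", "1.3.0") == 1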
Code example #3
File: test_kibana.py Project: elastic/beats
    def test_status(self):
        """
        kibana status metricset test
        """

        env = os.environ.get('TESTING_ENVIRONMENT')

        if env == "2x" or env == "5x":
            # Skip for 5.x and 2.x tests as Kibana endpoint not available
            raise SkipTest

        version = self.get_version()
        if semver.compare(version, "6.4.0") == -1:
            # Skip for Kibana versions < 6.4.0 as Kibana endpoint not available
            raise SkipTest

        self.render_config_template(modules=[{
            "name": "kibana",
            "metricsets": ["status"],
            "hosts": self.get_hosts(),
            "period": "1s"
        }])
        proc = self.start_beat()
        self.wait_until(lambda: self.output_lines() > 0, max_timeout=20)
        proc.check_kill_and_wait()
        self.assert_no_logged_warnings()

        output = self.read_output_json()
        self.assertTrue(len(output) >= 1)
        evt = output[0]
        print(evt)

        self.assert_fields_are_documented(evt)
Code example #4
def get_latest_version_tag():
    try:

        response = urllib2.urlopen("http://archive.fabscan.org/dists/stable/main/binary-armhf/Packages", timeout=5)

        latest_version = __version__
        line = 'START'
        while line != '':
            line = response.readline()
            if PACKAGE_PATTERN.match(line):
                while line != '':
                    line = response.readline()
                    match = VERSION_PATTERN.match(line)
                    if match is not None:
                        package_version = match.group(1)
                        try:
                            if semver.compare(latest_version, package_version) == -1:
                                latest_version = package_version
                        except ValueError:
                            # ignore invalid version number
                            pass
                        break
        return latest_version
    except (Exception, urllib2.URLError) as e:
        _logger.debug(e)
        return "0.0.0"
Code example #5
File: repository.py Project: pigmej/solar
 def _get_version(self, spec):
     spec = self._parse_spec(spec)
     version = spec['version']
     version_sign = spec['version_sign']
     resource_name = spec['resource_name']
     if version_sign == '==':
         return os.path.join(self.fpath, spec['resource_name'], version)
     found = self.iter_contents(resource_name)
     if version is None:
         sc = semver.compare
         sorted_vers = sorted(found,
                              cmp=lambda a, b: sc(a['version'],
                                                  b['version']),
                              reverse=True)
         if not sorted_vers:
             raise ResourceNotFound(spec)
         version = sorted_vers[0]['version']
     else:
         version = '{}{}'.format(version_sign, version)
         matched = filter(lambda x: semver.match(x['version'], version),
                          found)
         sorted_vers = sorted(matched,
                              cmp=lambda a, b: semver.compare(a['version'],
                                                              b['version']),
                              reverse=True)
         version = next((x['version'] for x in sorted_vers
                         if semver.match(x['version'], version)),
                        None)
     if version is None:
         raise ResourceNotFound(spec)
     return version
Code example #6
File: update.py Project: AerisCloud/AerisCloud
def cli(force):
    """
    Update AerisCloud
    """
    if not force and config.get('github', 'enabled', default=False) == 'true':
        client = Github().gh
        repo = client.repository('aeriscloud', 'aeriscloud')
        latest_release = repo.iter_releases().next()
        latest_version = latest_release.tag_name[1:]

        if semver.compare(version, latest_version) != -1:
            click.secho('AerisCloud is already up to date!', fg='green')
            sys.exit(0)

        click.echo('A new version of AerisCloud is available: %s (%s)' % (
            click.style(latest_version, fg='green', bold=True),
            click.style(latest_release.name, bold=True)
        ))

    # retrieve install script in a tmpfile
    tmp = tempfile.NamedTemporaryFile()
    r = requests.get('https://raw.githubusercontent.com/' +
                     'AerisCloud/AerisCloud/develop/scripts/install.sh')
    if r.status_code != 200:
        fatal('error: update server returned %d (%s)' % (
            r.status_code, r.reason))

    tmp.write(r.content)
    tmp.flush()

    os.environ['INSTALL_DIR'] = aeriscloud_path
    call(['bash', tmp.name])

    tmp.close()
Code example #7
File: refresh-tp.py Project: AOSC-Dev/scriptlets
def main():
    if len(sys.argv) < 2:
        print('%s <dir to translations>' % sys.argv[0])
        sys.exit(1)
    logging.warning('Scanning files...')
    local = collect_local_info(sys.argv[1])
    logging.warning('Fetching remote data...')
    remote = collect_remote_info()
    for f in local:
        remote_ver = remote.get(f[0])
        if not remote_ver:
            logging.error('Local file %s not found in remote data' % f[0])
        if f[1] == remote_ver:
            continue
        try:
            if semver.compare(f[1], remote_ver) >= 0:
                continue
        except ValueError:
            pass
        download_po(f[0], remote_ver, 'zh_CN', sys.argv[1])
        po_file = po_name.format(pkg=f[0], ver=f[1], lang='zh_CN')
        po_file = os.path.join(sys.argv[1], po_file)
        pot_file = po_name.format(pkg=f[0], ver=remote_ver, lang='zh_CN')
        pot_file = os.path.join(sys.argv[1], pot_file)
        if not subprocess.call(['msgmerge', po_file, pot_file, '-o', pot_file]):
            os.remove(po_file)
Code example #8
File: test_dashboard.py Project: gshamov/beats
    def test_load_dashboard_into_space(self, create_space=True):
        """
        Test loading dashboards into Kibana space
        """
        version = self.get_version()
        if semver.compare(version, "6.5.0") == -1:
            # Skip for Kibana versions < 6.5.0 as Kibana Spaces not available
            raise SkipTest

        self.render_config_template()
        if create_space:
            self.create_kibana_space()

        beat = self.start_beat(
            logging_args=["-e", "-d", "*"],
            extra_args=["setup",
                        "--dashboards",
                        "-E", "setup.dashboards.file=" +
                        os.path.join(self.beat_path, "tests", "files", "testbeat-dashboards.zip"),
                        "-E", "setup.dashboards.beat=testbeat",
                        "-E", "setup.kibana.protocol=http",
                        "-E", "setup.kibana.host=" + self.get_kibana_host(),
                        "-E", "setup.kibana.port=" + self.get_kibana_port(),
                        "-E", "setup.kibana.space.id=foo-bar",
                        "-E", "output.elasticsearch.hosts=['" + self.get_host() + "']",
                        "-E", "output.file.enabled=false"]
        )

        beat.check_wait(exit_code=0)

        assert self.log_contains("Kibana dashboards successfully loaded") is True
Code example #9
File: test_dashboard.py Project: gshamov/beats
    def test_dev_tool_export_dashboard_by_id_from_space(self):
        """
        Test dev-tools/cmd/dashboards exports dashboard from Kibana space
        and removes unsupported characters
        """
        version = self.get_version()
        if semver.compare(version, "6.5.0") == -1:
            # Skip for Kibana versions < 6.5.0 as Kibana Spaces not available
            raise SkipTest

        self.test_load_dashboard_into_space(False)

        path = os.path.normpath(self.beat_path + "/../dev-tools/cmd/dashboards/export_dashboards.go")
        command = path + " -kibana http://" + self.get_kibana_host() + ":" + self.get_kibana_port()
        command = "go run " + command + " -dashboard Metricbeat-system-overview -space-id foo-bar"

        p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        content, err = p.communicate()

        assert p.returncode == 0

        assert os.path.isfile("output.json") is True

        with open('output.json') as f:
            content = f.read()
            assert "Metricbeat-system-overview" in content

        os.remove("output.json")
Code example #10
File: utils.py Project: aromatix/fabric_navitia
def show_version(action='show', host='eng'):
    """
    prints, gets or checks versions (installed and candidate) from navitia-kraken/navitia-tyr/navitia-jormungandr package
    show: print versions on stdout
    get: returns tuple (installed, candidate) or (None, None) if navitia-kraken/tyr/jormungandr not installed on target,
         installed and candidate can be tuples if different versions are coexisting
    check: return True if candidate version is different from installed
    """
    versions = execute(get_version, host)
    def summarize(iterable):
        s = tuple(set(iterable))
        if len(s) == 1:
            return s[0]
        return s
    if action == 'show':
        print(green(host_app_mapping[host]))
        for k, v in versions.iteritems():
            print(green("  %s, installed: %s, candidate: %s" % (k, v[0], v[1])))
    elif action == 'get':
        installed = summarize(x[0] for x in versions.itervalues())
        candidate = summarize(x[1] for x in versions.itervalues())
        return installed, candidate
    elif action == 'check':
        if env.manual_package_deploy:
            print(yellow("WARNING Can't check versions of manually installed packages"))
            return True
        installed = summarize(x[0] for x in versions.itervalues())
        candidate = summarize(x[1] for x in versions.itervalues())
        if isinstance(installed, tuple):
            installed = max(installed)
        return semver.compare(candidate, installed) > 0
Code example #11
File: lobbyconnection.py Project: FAForever/server
    def check_version(self, message):
        versionDB, updateFile = self.player_service.client_version_info
        update_msg = dict(command="update",
                          update=updateFile,
                          new_version=versionDB)

        self.user_agent = message.get('user_agent')
        version = message.get('version')
        server.stats.gauge('user.agents.None', -1, delta=True)
        server.stats.gauge('user.agents.{}'.format(self.user_agent), 1, delta=True)

        if not version or not self.user_agent:
            update_msg['command'] = 'welcome'
            # For compatibility with 0.10.x updating mechanism
            self.sendJSON(update_msg)
            return False

        # Check their client is reporting the right version number.
        if 'downlords-faf-client' not in self.user_agent:
            try:
                if "-" in version:
                    version = version.split('-')[0]
                if "+" in version:
                    version = version.split('+')[0]
                if semver.compare(versionDB, version) > 0:
                    self.sendJSON(update_msg)
                    return False
            except ValueError:
                self.sendJSON(update_msg)
                return False
        return True
Code example #12
File: lobbyconnection.py Project: yorick-ne/server
    def check_version(self, message):
        versionDB, updateFile = self.player_service.client_version_info
        update_msg = dict(command="update",
                          update=updateFile,
                          new_version=versionDB)

        if 'version' not in message or 'user_agent' not in message:
            update_msg['command'] = 'welcome'
            # For compatibility with 0.10.x updating mechanism
            self.sendJSON(update_msg)
            return False

        version = message.get('version')

        # Check their client is reporting the right version number.
        if message.get('user_agent', None) != 'downlords-faf-client':
            try:
                if "-" in version:
                    version = version.split('-')[0]
                if "+" in version:
                    version = version.split('+')[0]
                if semver.compare(versionDB, version) > 0:
                    self.sendJSON(update_msg)
                    return False
            except ValueError:
                self.sendJSON(update_msg)
                return False
        return True
Code example #13
 def test_should_follow_specification_comparison(self):
     # produce comparison chain:
     # 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-beta.2 < 1.0.0-beta.11
     # < 1.0.0-rc.1 < 1.0.0-rc.1+build.1 < 1.0.0 < 1.0.0+0.3.7 < 1.3.7+build
     # < 1.3.7+build.2.b8f12d7 < 1.3.7+build.11.e0f985a
     # and in backward too.
     chain = ['1.0.0-alpha', '1.0.0-alpha.1', '1.0.0-beta.2',
              '1.0.0-beta.11', '1.0.0-rc.1',
              '1.0.0', '1.3.7+build']
     versions = zip(chain[:-1], chain[1:])
     for low_version, high_version in versions:
         self.assertEqual(
             compare(low_version, high_version), -1,
             '%s should be lesser than %s' % (low_version, high_version))
         self.assertEqual(
             compare(high_version, low_version), 1,
             '%s should be higher than %s' % (high_version, low_version))
Code example #14
File: git_utils.py Project: reubano/ongeza
 def tags(self):
     """
         :returns: list of git tags, sorted by the version number.
     """
     cmd = 'git tag'
     tags = self.sh(cmd, True).split('\n')
     compare = lambda x, y: semver.compare(x.lstrip('v'), y.lstrip('v'))
     return sorted(tags, key=cmp_to_key(compare))
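Because semver.compare is a two-argument comparator, sorting in Python 3 goes through functools.cmp_to_key as above; a minimal standalone sketch of the same idea (the tag names are made up):

import semver
from functools import cmp_to_key

tags = ["v1.10.0", "v1.2.0", "v1.9.3"]  # hypothetical tag names
ordered = sorted(tags, key=cmp_to_key(
    lambda x, y: semver.compare(x.lstrip("v"), y.lstrip("v"))))
# ordered == ["v1.2.0", "v1.9.3", "v1.10.0"] -- numeric rather than lexicographic order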
Code example #15
File: tests.py Project: jeloagnasin/python-semver
def test_should_follow_specification_comparison():
    """
    produce comparison chain:
    1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-beta.2 < 1.0.0-beta.11
    < 1.0.0-rc.1 < 1.0.0-rc.1+build.1 < 1.0.0 < 1.0.0+0.3.7 < 1.3.7+build
    < 1.3.7+build.2.b8f12d7 < 1.3.7+build.11.e0f985a
    and in backward too.
    """
    chain = [
        '1.0.0-alpha', '1.0.0-alpha.1', '1.0.0-beta.2', '1.0.0-beta.11',
        '1.0.0-rc.1', '1.0.0', '1.3.7+build',
    ]
    versions = zip(chain[:-1], chain[1:])
    for low_version, high_version in versions:
        assert compare(low_version, high_version) == -1, \
            '%s should be lesser than %s' % (low_version, high_version)
        assert compare(high_version, low_version) == 1,  \
            '%s should be higher than %s' % (high_version, low_version)
Code example #16
File: test_elasticsearch.py Project: 7AC/beats
    def check_skip(self, metricset, es):
        if metricset != "ccr":
            return

        version = self.get_version(es)
        if semver.compare(version, "6.5.0") == -1:
            # Skip CCR metricset system test for Elasticsearch versions < 6.5.0 as CCR Stats
            # API endpoint is not available
            raise SkipTest("elasticsearch/ccr metricset system test only valid with Elasticsearch versions >= 6.5.0")
Code example #17
File: version_check.py Project: 13768324554/redash
def _compare_and_update(latest_version):
    # TODO: support alpha channel (allow setting which channel to check & parse build number)
    is_newer = semver.compare(current_version, latest_version) == -1
    logging.info("Latest version: %s (newer: %s)", latest_version, is_newer)

    if is_newer:
        redis_connection.set(REDIS_KEY, latest_version)
    else:
        redis_connection.delete(REDIS_KEY)
Code example #18
File: gui.py Project: spark/device-updater
    def _can_upgrade(self, device, device_version):
        if device.platform != 'electron':
            return True

        if not device_version or semver.compare(device_version, '0.5.3') < 0:
            self.electron_upgrade_popup.open()
            return False

        return True
Code example #19
File: commands.py Project: hamon-in/invoice
 def update(self):
     sess = model.get_session(self.args['db'])
     db_version = sess.query(model.Config).filter(model.Config.name == "version").one().value
     sw_version = __version__
     alembic_cfg = helpers.get_alembic_config(self.args['db'])
     
     self.l.debug("Software version %s", sw_version)
     self.l.debug("Database version %s", db_version)
     if semver.compare(db_version, sw_version) == -1:
         command.upgrade(alembic_cfg, "head")
         version = sess.query(model.Config).filter(model.Config.name == "version").one()
         version.value = __version__
         sess.add(version)
         sess.commit()
         self.l.info("Database older than software. Updated to %s", __version__)
     elif semver.compare(db_version, __version__) == 1:
         self.l.info("Database newer than software. Please upgrade the application.")
     else:
         self.l.info("No updates necessary.")
Code example #20
File: tests.py Project: jeloagnasin/python-semver
def test_should_raise_value_error_for_invalid_value():
    with pytest.raises(ValueError):
        compare('foo', 'bar')
    with pytest.raises(ValueError):
        compare('1.0', '1.0.0')
    with pytest.raises(ValueError):
        compare('1.x', '1.0.0')
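This ValueError behaviour is why several snippets above (e.g. #4, #7 and #27) wrap semver.compare in try/except; a small sketch of that guard pattern (the helper name is illustrative):

import semver

def is_newer(candidate, current):
    # True only when both strings parse as full semver and candidate > current;
    # malformed versions such as "1.0" or "foo" are treated as "not newer".
    try:
        return semver.compare(candidate, current) > 0
    except ValueError:
        return False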
Code example #21
File: commands.py Project: hamon-in/invoice
 def migrate(self):
     sess = model.get_session(self.args['db'])
     db_version = sess.query(model.Config).filter(model.Config.name == "version").one().value
     sw_version = __version__
     alembic_cfg = helpers.get_alembic_config(self.args['db'])
     
     self.l.debug("Software version %s", sw_version)
     self.l.debug("Database version %s", db_version)
     if semver.compare(db_version, sw_version) == -1:
         command.revision(alembic_cfg, "head", autogenerate=True, rev_id=sw_version)
         self.l.info("New migration created from %s to %s", db_version, sw_version)
     else:
         self.l.info("Migrations not necessary. Database is not older than software")
Code example #22
    def compare_versions(self, version1, version2):
        """
        Compares two version strings to see which is greater

        Date-based version numbers (used by GitHub and BitBucket providers)
        are automatically pre-pended with a 0 so they are always less than
        version 1.0.

        :return:
            -1  if version1 is less than version2
             0  if they are equal
             1  if version1 is greater than version2
        """

        def date_compat(v):
            # We prepend 0 to all date-based version numbers so that developers
            # may switch to explicit versioning from GitHub/BitBucket
            # versioning based on commit dates
            date_match = re.match('(\d{4})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})$', v)
            if date_match:
                v = '0.%s.%s.%s.%s.%s.%s' % date_match.groups()
            return v

        def semver_compat(v):
            # When translating dates into semver, the way to get each date
            # segment into the version is to treat the year and month as
            # minor and patch, and then the rest as a numeric build version
            # with four different parts. The result looks like:
            # 0.2012.11+10.31.23.59
            date_match = re.match('(\d{4}(?:\.\d{2}){2})\.(\d{2}(?:\.\d{2}){3})$', v)
            if date_match:
                v = '%s+%s' % (date_match.group(1), date_match.group(2))

            # Semver must have major, minor, patch
            elif re.match('^\d+$', v):
                v += '.0.0'
            elif re.match('^\d+\.\d+$', v):
                v += '.0'
            return v

        def cmp_compat(v):
            return [int(x) for x in re.sub(r'(\.0+)*$', '', v).split(".")]

        version1 = date_compat(version1)
        version2 = date_compat(version2)
        try:
            return semver.compare(semver_compat(version1), semver_compat(version2))
        except (ValueError):
            cv1 = cmp_compat(version1)
            cv2 = cmp_compat(version2)
            return (cv1 > cv2) - (cv1 < cv2)
Code example #23
def main(version_file, site_folder):
    running_version = reviewboard.get_version_string()

    try:
        with open(version_file) as f:
            previous_version = f.readline().strip()
    except IOError:
        previous_version = "0.0.0"

    if semver.compare(running_version, previous_version) == 1:
        print("ReviewBoard upgrade detected, performing rb-site upgrade")
        subprocess.check_call(["rb-site", "upgrade", site_folder])
        with open(version_file, 'w') as f:
            f.write(running_version)
Code example #24
    def compare_versions(self, version1, version2):
        """
        Compares two version strings to see which is greater

        Date-based version numbers (used by GitHub and BitBucket providers)
        are automatically pre-pended with a 0 so they are always less than
        version 1.0.

        :return:
            -1  if version1 is less than version2
             0  if they are equal
             1  if version1 is greater than version2
        """

        def semver_compat(v):
            # We prepend 0 to all date-based version numbers so that developers
            # may switch to explicit versioning from GitHub/BitBucket
            # versioning based on commit dates.
            #
            # When translating dates into semver, the way to get each date
            # segment into the version is to treat the year and month as
            # minor and patch, and then the rest as a numeric build version
            # with four different parts. The result looks like:
            # 0.2012.11+10.31.23.59
            date_match = re.match('(\d{4})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})$', v)
            if date_match:
                v = '0.%s.%s+%s.%s.%s.%s' % date_match.groups()

            # This handles version that were valid pre-semver with 4+ dotted
            # groups, such as 1.6.9.0
            four_plus_match = re.match('(\d+\.\d+\.\d+)\.(\d+(\.\d+)*)$', v)
            if four_plus_match:
                v = '%s+%s' % (four_plus_match.group(1), four_plus_match.group(2))

            # Semver must have major, minor, patch
            elif re.match('^\d+$', v):
                v += '.0.0'
            elif re.match('^\d+\.\d+$', v):
                v += '.0'
            return v

        try:
            return semver.compare(semver_compat(version1), semver_compat(version2))
        except (ValueError) as e:
            console_write(u"Error comparing versions - %s" % e, True)
            return 0
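As a rough illustration of the docstring above, a date-based version is rewritten so that year and month become minor and patch and the rest becomes build metadata before semver.compare runs (the input value here is made up):

import re
import semver

v = "2012.11.10.31.23.59"  # hypothetical date-based version
m = re.match(r'(\d{4})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})$', v)
compat = '0.%s.%s+%s.%s.%s.%s' % m.groups()   # -> "0.2012.11+10.31.23.59"
assert semver.compare(compat, "1.0.0") == -1  # always sorts below explicit 1.x releases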
Code example #25
File: main.py Project: averrin/diaboli-ex
    def __init__(self):
        QMainWindow.__init__(self)
        self.resize(800, 600)
        self.setWindowTitle('Diaboli Ex')

        home = os.path.expanduser('~')
        self.worldPath = os.path.join(home, '.diaboli-ex.json')
        self.cwd = CWD
        self.local = LOCAL

        if os.path.isfile(self.worldPath):
            world = json.load(open(self.worldPath, 'r'))
            if 'version' not in world:
                world['version'] = '0.0.0'
            if semver.compare(VERSION, world['version']) > 0:
                v1 = world['version'].split('.')
                v0 = VERSION.split('.')
                if v0[0] != v1[0] or v0[1] != v1[1]:
                    self.initNewWorld()
                    world = json.load(open(self.worldPath, 'r'))
        else:
            self.initNewWorld()
            world = json.load(open(self.worldPath, 'r'))

        world['launches'] += 1
        world['version'] = VERSION
        self.world = AttrDict(world)
        self.saveWorld()
        if self.local:
            self.achievements = json.load(open(os.path.join(CWD, 'data', 'achievements.json'), 'r', encoding='utf8'))
        else:
            self.achievements = requests.get(BASE_URL + 'data/achievements.json').json()

        self.tabs = QTabWidget()
        self.view = QWebView(self)

        self.view.page().mainFrame().javaScriptWindowObjectCleared.connect(self.injectObjects)

        if DEBUG:
            self.showEditor()
        else:
            self.setCentralWidget(self.view)

        self.createContext()
        self.loadWorld()
Code example #26
File: version.py Project: pombredanne/versionner
    def _cmp(self, other):
        """
        Compare versions

        :rtype : any
        :param other: version as any recognizable type
        :return: int
        """
        if isinstance(other, self.__class__):
            v1, v2 = str(self), str(other)
        elif isinstance(other, dict):
            v1, v2 = str(self), str(self.__class__(other))
        elif isinstance(other, str):
            v1, v2 = str(self), other
        else:
            return NotImplemented

        return semver.compare(v1, v2)
Code example #27
    def find_a_release(self):
        if self.version == "latest" and self.release_type == "release":
            return self.repository.latest_release()
        elif self.version == "latest" and self.release_type == "any":
            # We need to filter
            pass
        elif self.version == "latest":
            # We need to filter
            pass
        elif self.version != "latest" and self.release_type == "draft":
            # Specify a draft release version 
            for release in self.repository.releases():
                if release.tag_name == self.version:
                    return release
        else:
            # Get a specific release not latest
            release_from_tag = self.repository.release_from_tag(self.version)
            if release_from_tag:
                return release_from_tag
            else:
                # Failed to find tag
                self.module.fail_json(msg="failed to find version {} in repo {}".format(self.version, self.full_repo))

        latest_release = type('obj', (object,), {'tag_name': '0.0.0'})

        for release in self.repository.releases():
            if self.release_type == "any":
                pass
                # don't filter by type
            elif getattr(release, self.release_type):
                try:
                    if semver.compare(release.tag_name, latest_release.tag_name) == 1:
                        latest_release = release
                except ValueError as e:
                    self.module.fail_json(msg="{}".format(e))

        if latest_release.tag_name == '0.0.0':
            self.module.fail_json(msg="failed to find latest release type {} in repo {}"
                                  .format(self.release_type, self.full_repo))

        return latest_release
Code example #28
def main():
    """lets start our task"""
    # clone the repo
    cleanup(LOCAL_WORK_COPY)
    try:
        r = Repo.clone_from(git_url, LOCAL_WORK_COPY)
    except GitCommandError as git_error:
        print(git_error)
        exit(-1)

    d = feedparser.parse(
        'https://github.com/mattermost/mattermost-server/releases.atom')
    release_version = d.entries[0].title[1:]

    # lets read the dockerfile of the current master
    dfp = DockerfileParser()

    with open('./mattermost-openshift-workdir/Dockerfile') as f:
        dfp.content = f.read()

    if 'MATTERMOST_VERSION' in dfp.envs:
        dockerfile_version = dfp.envs['MATTERMOST_VERSION']

    # Lets check if we got a new release
    if semver.compare(release_version, dockerfile_version) == 1:
        print("Updating from %s to %s" % (dockerfile_version, release_version))

        target_branch = 'bots-life/update-to-' + release_version

        if not pr_in_progress(target_branch):
            patch_and_push(dfp, r, target_branch, release_version)
            cleanup(LOCAL_WORK_COPY)

            create_pr_to_master(target_branch)
        else:
            print("There is an open PR for %s, aborting..." %
                  (target_branch))

    else:
        print("we are even with Mattermost %s, no need to update" %
              release_version)
Code example #29
    def get_latest_release(self):
        response = requests.get(self.releases_url)
        releases = response.json()

        latest_version = None
        for r in releases:
            if r["prerelease"] and not self.include_prereleases:
                continue
            version = r["tag_name"]
            # some tags were created with a 'v' prefix; remove it (e.g. v0.0.3-alpha -> 0.0.3-alpha)
            if version[0] == "v":
                version = version[1:]

            # latest version of semver can't parse a 0 pre-release version (e.g. 1.0.0-rc.0)
            try:
                semver.parse(version)
            except ValueError:
                continue

            if latest_version is None or semver.compare(version, latest_version) > 0:
                latest_version = version
        return latest_version
Code example #30
File: xml.py Project: mangalam-research/btw
def get_supported_schema_versions():
    """
    Returns a list of schema versions that we support.
    """
    global _supported_schema_versions  # pylint: disable=global-statement

    if _supported_schema_versions is not None:
        return _supported_schema_versions

    versions = []
    outdir = os.path.join(schemas_dirname, "out")
    for d in (d for d in os.listdir(outdir)
              if os.path.isdir(os.path.join(outdir, d))):
        match = schema_dir_re.match(d)
        if match:
            version = match.group(1)
            versions.append(version)

    # We don't actually always have the patch number that semver
    # expects, so normalize.
    def norm(x):
        return (x + ".0") if x.count(".") == 1 else x

    versions = sorted(versions, lambda a, b: semver.compare(norm(a),
                                                            norm(b)))

    # We support validating all versions that we find but we can
    # revert only to the last one.
    ret = OrderedDict()
    for v in versions[:-1]:
        ret[v] = VersionInfo(can_validate=True, can_revert=False)

    ret[versions[-1]] = VersionInfo(can_validate=True, can_revert=True)

    _supported_schema_versions = ret

    return ret
Code example #31
                        rancher_compose = yaml.load(f)
                        version_dirs[service_version_dir] = rancher_compose
                except yaml.YAMLError, exc:
                    print "Error in rancher-compose.yml file: ", exc
            else:
                print version_dir + ": missing rancher-compose.yml"
    # print_keys("Unfiltered:", version_dirs)

    # Filter version dirs by min/max rancher version
    filtered = {}
    for key, value in version_dirs.iteritems():
        if '.catalog' in value:
            catalog = value['.catalog']
            if 'minimum_rancher_version' in catalog:
                min_version = catalog['minimum_rancher_version'].lstrip('v')
                if semver.compare(rancher_version, min_version) < 0:
                    continue
            if 'maximum_rancher_version' in catalog:
                max_version = catalog['maximum_rancher_version'].lstrip('v')
                if semver.compare(rancher_version, max_version) > 0:
                    continue
        filtered[key] = value
    # print_keys("Server Version:", filtered)

    # Bail out if only one remains
    if len(filtered) == 1:
        for key, value in filtered.iteritems():
            return key, value['.catalog']['version']
        return list(filtered)[0]

    # Try to return the template version in config.yml
Code example #32
File: tool.py Project: huoshuaibing/picbed
def less_latest_tag(latest_tag):
    """Return whether the current application version is lower than the latest GitHub tag."""
    if latest_tag and is_valid_verion(latest_tag):
        return semver.compare(latest_tag, PICBED_VERSION) == 1
Code example #33
File: package_stub.py Project: dav009/sputnik
 def __lt__(self, other):
     self._error_on_different_name(other)
     return semver.compare(self.version, other.version) < 0
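A comparator like the one above is often paired with functools.total_ordering so that a single compare-based method yields the full set of orderings; a minimal sketch assuming plain version strings:

import semver
from functools import total_ordering

@total_ordering
class Pkg:
    def __init__(self, version):
        self.version = version

    def __eq__(self, other):
        return semver.compare(self.version, other.version) == 0

    def __lt__(self, other):
        return semver.compare(self.version, other.version) < 0

assert Pkg("1.2.0") < Pkg("1.10.0")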
Code example #34
def test_pandas():
    import pandas as pd
    assert semver.compare(pd.__version__, '0.18.0') == 1
Code example #35
def compareVersion(latestVersion, currentVersion):
    return semver.compare(latestVersion, currentVersion)
Code example #36
async def fetch_and_update_release(app,
                                   ref_id: str,
                                   ignore_errors: bool = False) -> dict:
    """
    Get the latest release for the GitHub repository identified by the passed `slug`. If a release is found, update the
    reference identified by the passed `ref_id` and return the release.

    Exceptions can be ignored during the GitHub request. Error information will still be written to the reference
    document.

    :param app: the application object
    :param ref_id: the id of the reference to update
    :param ignore_errors: ignore exceptions raised during GitHub request
    :return: the latest release

    """
    db = app["db"]

    retrieved_at = virtool.utils.timestamp()

    document = await db.references.find_one(
        ref_id, ["installed", "release", "remotes_from"])

    release = document.get("release")
    etag = virtool.github.get_etag(release)

    # Variables that will be used when trying to fetch release from GitHub.
    errors = list()
    updated = None

    try:
        updated = await virtool.github.get_release(
            app["settings"], app["client"], document["remotes_from"]["slug"],
            etag)

        if updated:
            updated = virtool.github.format_release(updated)

    except (aiohttp.ClientConnectorError, virtool.errors.GitHubError) as err:
        if "ClientConnectorError" in str(err):
            errors = ["Could not reach GitHub"]

        if "404" in str(err):
            errors = ["GitHub repository or release does not exist"]

        if errors and not ignore_errors:
            raise

    if updated:
        release = updated

    if release:
        installed = document["installed"]

        release["newer"] = bool(installed and semver.compare(
            release["name"].lstrip("v"), installed["name"].lstrip("v")) == 1)

        release["retrieved_at"] = retrieved_at

    await db.references.update_one(
        {"_id": ref_id}, {"$set": {
            "errors": errors,
            "release": release
        }})

    return release
Code example #37
File: tests.py Project: isabella232/python-semver
def test_should_compare_release_candidate_with_release():
    assert compare('1.0.0-rc.1', '1.0.0') == -1
    assert compare('1.0.0-rc.1+build.1', '1.0.0') == -1
Code example #38
File: tests.py Project: isabella232/python-semver
def test_should_compare_rc_builds():
    assert compare('1.0.0-beta.2', '1.0.0-beta.11') == -1
Code example #39
def contracts_version_with_max_token_networks(version: Optional[str]) -> bool:
    if version is None:
        # contracts_version == None means the stock version in development.
        return True
    return compare(version, "0.9.0") >= 0
Code example #40
def contracts_version_has_initial_service_deposit(
        version: Optional[str]) -> bool:
    if version is None:
        # contracts_version == None means the stock version in development.
        return True
    return compare(version, "0.18.0") > 0
Code example #41
def contracts_version_provides_services(version: Optional[str]) -> bool:
    if version is None:
        # contracts_version == None means the stock version in development.
        return True
    return compare(version, "0.8.0") >= 0
Code example #42
    def old_apache_version(self):
        if 'APACHE_VERSION' not in self.COMPOSE_ENV:
            return False

        version = self.COMPOSE_ENV['APACHE_VERSION']
        return semver.compare(version, '2.4.12') <= 0
Code example #43
    def flash(self):
        """
        Takes the currently active tab, compiles the Python script therein into
        a hex file and flashes it all onto the connected device.

        WARNING: This method is getting more complex due to several edge
        cases. Ergo, it's a target for refactoring.
        """
        user_defined_microbit_path = None
        self.python_script = ''
        logger.info('Preparing to flash script.')
        # The first thing to do is check the script is valid and of the
        # expected length.
        # Grab the Python script.
        tab = self.view.current_tab
        if tab is None:
            # There is no active text editor. Exit.
            return
        # Check the script's contents.
        python_script = tab.text().encode('utf-8')
        logger.debug('Python script:')
        logger.debug(python_script)
        # Check minification status.
        minify = False
        if mflash.get_minifier():
            minify = self.editor.minify
        # Attempt and handle minification.
        if len(python_script) >= mflash._MAX_SIZE:
            message = _('Unable to flash "{}"').format(tab.label)
            if minify and can_minify:
                orginal = len(python_script)
                script = python_script.decode('utf-8')
                try:
                    mangled = nudatus.mangle(script).encode('utf-8')
                except TokenError as e:
                    msg, (line, col) = e.args
                    logger.debug('Minify failed')
                    logger.exception(e)
                    message = _("Problem with script")
                    information = _("{} [{}:{}]").format(msg, line, col)
                    self.view.show_message(message, information, 'Warning')
                    return
                saved = orginal - len(mangled)
                percent = saved / orginal * 100
                logger.debug(
                    'Script minified, {} bytes ({:.2f}%) saved:'.format(
                        saved, percent))
                logger.debug(mangled)
                python_script = mangled
                if len(python_script) >= 8192:
                    information = _("Our minifier tried but your "
                                    "script is too long!")
                    self.view.show_message(message, information, 'Warning')
                    return
            elif minify and not can_minify:
                information = _("Your script is too long and the minifier"
                                " isn't available")
                self.view.show_message(message, information, 'Warning')
                return
            else:
                information = _("Your script is too long!")
                self.view.show_message(message, information, 'Warning')
                return
        # By this point, there's a valid Python script in "python_script".
        # Assign this to an attribute for later processing in a different
        # method.
        self.python_script = python_script
        # Next step: find the microbit port and serial number.

        path_to_mini = mflash.find_mini()
        logger.warning('found mini...')
        logger.info('Path to mini: {}'.format(path_to_mini))
        port = None
        serial_number = None

        try:
            port, serial_number = self.find_device()
            logger.info('Serial port: {}'.format(port))
            logger.info('Device serial number: {}'.format(serial_number))
        except Exception as ex:
            logger.warning('Unable to make serial connection to mini.')
            logger.warning(ex)
        # Determine the location of the mini. If it can't be found
        # fall back to asking the user to locate it.
        user_defined_microbit_path = path_to_mini
        if path_to_mini is None:
            # Ask the user to locate the device.
            path_to_mini = self.view.get_calliopemini_path(HOME_DIRECTORY)
            user_defined_microbit_path = path_to_mini
            logger.debug('User defined path to mini: {}'.format(
                user_defined_microbit_path))
        # Check the path and that it exists simply because the path maybe based
        # on stale data.

        if path_to_mini and os.path.exists(path_to_mini):
            force_flash = False  # If set to true, fully flash the device.
            # If there's no port but there's a path_to_mini, then we're
            # probably running on Windows with an old device, so force flash.
            if not port:
                force_flash = True
            if not self.python_script.strip():
                # If the script is empty, this is a signal to simply force a
                # flash.
                logger.info("Python script empty. Forcing flash.")
                force_flash = True
            logger.info("Checking target device.")

            # Get the version of MicroPython on the device.
            try:
                version_info = minifs.version()
                logger.info(version_info)
                board_info = version_info['version'].split()
                if (board_info[0] == 'mini' and board_info[1].startswith('v')):
                    # New style versions, so the correct information will be
                    # in the "release" field.
                    try:
                        # Check the release is a correct semantic version.
                        semver.parse(version_info['release'])
                        board_version = version_info['release']
                    except ValueError:
                        # If it's an invalid semver, set to unknown version to
                        # force flash.
                        board_version = '0.0.1'
                else:
                    # 0.0.1 indicates an old unknown version. This is just a
                    # valid arbitrary flag for semver comparison a couple of
                    # lines below.
                    board_version = '0.0.1'
                logger.info('Board MicroPython: {}'.format(board_version))
                logger.info('Mu MicroPython: {}'.format(
                    mflash.MICROPYTHON_VERSION))
                # If there's an older version of MicroPython on the device,
                # update it with the one packaged with Mu.
                if semver.compare(board_version,
                                  mflash.MICROPYTHON_VERSION) < 0:
                    force_flash = True
            except Exception:
                # Could not get version of MicroPython. This means either the
                # device has a really old version of MicroPython or is running
                # something else. In any case, flash MicroPython onto the
                # device.
                logger.warn('Could not detect version of MicroPython.')
                force_flash = True
            # Check use of custom runtime.
            rt_hex_path = self.editor.mini_runtime.strip()
            # rt_hex_path = "mini_runtime.hex"
            # rt_hex_path = os.path.dirname(os.path.realpath(__file__))+"/mini_runtime.hex"
            # self.view.show_message("hex path used", rt_hex_path)
            # rt_hex_path = os.path.join(dir,"")
            message = _('Flashing "{}" onto the mini.').format(tab.label)

            if (rt_hex_path and os.path.exists(rt_hex_path)):
                message = message + _(" Runtime: {}").format(rt_hex_path)
                #self.view.show_message("hex path used", rt_hex_path)
                force_flash = True  # Using a custom runtime, so flash it.
            else:
                # self.view.show_message("hex path not used", rt_hex_path)
                rt_hex_path = None
                self.editor.mini_runtime = ''
            # Check for use of user defined path (to save hex onto local
            # file system.
            if user_defined_microbit_path:
                force_flash = True

            # If we need to flash the device with a clean hex, do so now.
            if force_flash:
                logger.info('Flashing new MicroPython runtime onto device')
                self.editor.show_status_message(message, 10)
                self.set_buttons(flash=False)
                if user_defined_microbit_path or not port:
                    # The user has provided a path to a location on the
                    # filesystem. In this case save the combined hex/script
                    # in the specified path_to_mini.
                    # Or... Mu has a path to a mini but can't establish
                    # a serial connection, so use the combined hex/script
                    # to flash the device.

                    self.flash_thread = DeviceFlasher([path_to_mini],
                                                      self.python_script,
                                                      rt_hex_path)
                    # Reset python_script so Mu doesn't try to copy it as the
                    # main.py file.
                    self.python_script = ''

                else:
                    # We appear to need to flash a connected mini device,
                    # so just flash the Python hex with no embedded Python
                    # script, since this will be copied over when the
                    # flashing operation has finished.
                    model_serial_number = serial_number[:4]
                    if rt_hex_path:
                        # If the user has specified a bespoke runtime hex file
                        # assume they know what they're doing and hope for the
                        # best.
                        self.flash_thread = DeviceFlasher([path_to_mini], b'',
                                                          rt_hex_path)
                    elif model_serial_number in self.valid_serial_numbers:
                        # The connected board has a serial number that
                        # indicates the MicroPython hex bundled with Mu
                        # supports it. In which case, flash it.
                        self.flash_thread = DeviceFlasher([path_to_mini], b'',
                                                          None)
                    else:
                        message = _('Unsupported mini.')
                        information = _("Your device is newer than this "
                                        "version of Mu. Please update Mu "
                                        "to the latest version to support "
                                        "this device.\n\n"
                                        "https://codewith.mu/")
                        self.view.show_message(message, information)
                        return
                if sys.platform == 'win32':
                    # Windows blocks on write.
                    self.flash_thread.finished.connect(self.flash_finished)
                else:
                    if user_defined_microbit_path:
                        # Call the flash_finished immediately the thread
                        # finishes if Mu is writing the hex file to a user
                        # defined location on the local filesystem.
                        self.flash_thread.finished.connect(self.flash_finished)
                    else:
                        # Other platforms don't block, so schedule the finish
                        # call for 10 seconds (approximately how long flashing
                        # the connected device takes).
                        self.flash_timer = QTimer()
                        self.flash_timer.timeout.connect(self.flash_finished)
                        self.flash_timer.setSingleShot(True)
                        self.flash_timer.start(20000)
                self.flash_thread.on_flash_fail.connect(self.flash_failed)
                self.flash_thread.start()
            else:
                try:
                    self.copy_main()
                except IOError as ioex:
                    # There was a problem with the serial communication with
                    # the device, so revert to forced flash... "old style".
                    # THIS IS A HACK! :-(
                    logger.warning('Could not copy file to device.')
                    logger.error(ioex)
                    logger.info('Falling back to old-style flashing.')
                    self.flash_thread = DeviceFlasher([path_to_mini],
                                                      self.python_script,
                                                      rt_hex_path)
                    self.python_script = ''
                    if sys.platform == 'win32':
                        # Windows blocks on write.
                        self.flash_thread.finished.connect(self.flash_finished)
                    else:
                        self.flash_timer = QTimer()
                        self.flash_timer.timeout.connect(self.flash_finished)
                        self.flash_timer.setSingleShot(True)
                        self.flash_timer.start(20000)
                    self.flash_thread.on_flash_fail.connect(self.flash_failed)
                    self.flash_thread.start()
                except Exception as ex:
                    self.flash_failed(ex)
        else:
            # Try to be helpful... essentially there is nothing Mu can do but
            # prompt for patience while the device is mounted and/or do the
            # classic "have you tried switching it off and on again?" trick.
            # This one's for James at the Raspberry Pi Foundation. ;-)
            message = _('Could not find an attached mini.')
            information = _("Please ensure you leave enough time for the"
                            " mini to be attached and configured"
                            " correctly by your computer. This may take"
                            " several seconds."
                            " Alternatively, try removing and re-attaching the"
                            " device or saving your work and restarting Mu if"
                            " the device remains unfound.")
            self.view.show_message(message, information)
Code example #44
File: main.py Project: DNXLabs/docker-kube-tools
        'Accept': 'application/vnd.github.v3+json'
    }

    response_new_release = requests.post(
        'https://api.github.com/repos/DNXLabs/docker-kube-tools/releases',
        data=json.dumps(data),
        headers=headers
    )


if __name__ == "__main__":
    # Kubectl
    kubectl_latest = get_kubectl_latest_version()
    print('Kubectl upstream version: %s' % kubectl_latest)
    print('DNX Kubectl version: %s' % control['kubectl_version'])
    if semver.compare(kubectl_latest, control['kubectl_version']) == 1:
        print('Rendering template for Kubectl.')
        render_template(tag_kubectl=kubectl_latest)
        control['kubectl_version'] = kubectl_latest
        update_control(control)
        commit_message = 'Bump Kubectl version to v%s' % kubectl_latest
        add_commit_push(commit_message)
        release_changes = True
        release_message += "- %s.\r\n" % commit_message
    else:
        print('Nothing to do, the upstream is at the same or a lower version.')

    print('--------------------------------')

    # Helm
    helm_latest = get_helm_latest_version()
Code example #45
def run(dry_run, gitlab_project_id=None, thread_pool_size=10):
    settings = queries.get_app_interface_settings()
    clusters = queries.get_clusters()
    clusters = [c for c in clusters if c.get('ocm') is not None]
    ocm_map = ocmmod.OCMMap(
        clusters=clusters, integration=QONTRACT_INTEGRATION,
        settings=settings, init_provision_shards=True)
    current_state, pending_state = ocm_map.cluster_specs()
    desired_state = fetch_desired_state(clusters)

    if not dry_run:
        mr_cli = mr_client_gateway.init(gitlab_project_id=gitlab_project_id)

    error = False
    clusters_updates = {}
    for cluster_name, desired_spec in desired_state.items():
        current_spec = current_state.get(cluster_name)
        if current_spec:
            clusters_updates[cluster_name] = {'spec': {}, 'root': {}}
            cluster_path = 'data' + \
                [c['path'] for c in clusters
                 if c['name'] == cluster_name][0]

            # validate version
            desired_spec['spec'].pop('initial_version')
            desired_version = desired_spec['spec'].pop('version')
            current_version = current_spec['spec'].pop('version')
            compare_result = 1  # default value in case version is empty
            if desired_version:
                compare_result = \
                    semver.compare(current_version, desired_version)
            if compare_result > 0:
                # current version is larger due to an upgrade.
                # submit MR to update cluster version
                logging.info(
                    '[%s] desired version %s is different ' +
                    'from current version %s. ' +
                    'version will be updated automatically in app-interface.',
                    cluster_name, desired_version, current_version)
                clusters_updates[cluster_name]['spec']['version'] = current_version  # noqa: E501
            elif compare_result < 0:
                logging.error(
                    f'[{cluster_name}] version {desired_version} ' +
                    f'is different from current version {current_version}. ' +
                    f'please correct version to be {current_version}, ' +
                    'as this field is only meant for tracking purposes. ' +
                    'upgrades are determined by ocm-upgrade-scheduler.')
                error = True

            if not desired_spec['spec'].get('id'):
                clusters_updates[cluster_name]['spec']['id'] = \
                    current_spec['spec']['id']

            if not desired_spec['spec'].get('external_id'):
                clusters_updates[cluster_name]['spec']['external_id'] = \
                    current_spec['spec']['external_id']

            if not desired_spec.get('consoleUrl'):
                clusters_updates[cluster_name]['root']['consoleUrl'] = \
                    current_spec['console_url']

            if not desired_spec.get('serverUrl'):
                clusters_updates[cluster_name]['root']['serverUrl'] = \
                    current_spec['server_url']

            if not desired_spec.get('elbFQDN'):
                clusters_updates[cluster_name]['root']['elbFQDN'] = \
                    f"elb.apps.{cluster_name}.{current_spec['domain']}"

            desired_provision_shard_id = \
                desired_spec['spec'].get('provision_shard_id')
            current_provision_shard_id = \
                current_spec['spec']['provision_shard_id']
            if desired_provision_shard_id != current_provision_shard_id:
                clusters_updates[cluster_name]['spec']['provision_shard_id'] =\
                    current_provision_shard_id

            if clusters_updates[cluster_name]:
                clusters_updates[cluster_name]['path'] = cluster_path

            # exclude params we don't want to check in the specs
            for k in ['id', 'external_id', 'provision_shard_id']:
                current_spec['spec'].pop(k, None)
                desired_spec['spec'].pop(k, None)

            desired_uwm = desired_spec['spec'].get(ocmmod.DISABLE_UWM_ATTR)
            current_uwm = current_spec['spec'].get(ocmmod.DISABLE_UWM_ATTR)

            if desired_uwm is None and current_uwm is not None:
                clusters_updates[cluster_name]['spec'][ocmmod.DISABLE_UWM_ATTR] =\
                    current_uwm  # noqa: E501

            # check if cluster update, if any, is valid
            update_spec, err = get_cluster_update_spec(
                cluster_name,
                current_spec,
                desired_spec,
            )
            if err:
                logging.warning(f"Invalid changes to spec: {update_spec}")
                error = True
                continue
            # update cluster
            # TODO(mafriedm): check dry_run in OCM API patch
            if update_spec:
                logging.info(['update_cluster', cluster_name])
                logging.debug(
                    '[%s] desired spec %s is different ' +
                    'from current spec %s',
                    cluster_name, desired_spec, current_spec)
                if not dry_run:
                    ocm = ocm_map.get(cluster_name)
                    ocm.update_cluster(cluster_name, update_spec, dry_run)
        else:
            # create cluster
            if cluster_name in pending_state:
                continue
            logging.info(['create_cluster', cluster_name])
            ocm = ocm_map.get(cluster_name)
            ocm.create_cluster(cluster_name, desired_spec, dry_run)

    create_update_mr = False
    for cluster_name, cluster_updates in clusters_updates.items():
        for k, v in cluster_updates['spec'].items():
            logging.info(
                f"[{cluster_name}] desired key in spec " +
                f"{k} will be updated automatically " +
                f"with value {v}."
            )
            create_update_mr = True
        for k, v in cluster_updates['root'].items():
            logging.info(
                f"[{cluster_name}] desired root key {k} will "
                f"be updated automatically with value {v}"
            )
            create_update_mr = True
    if create_update_mr and not dry_run:
        mr = cu.CreateClustersUpdates(clusters_updates)
        mr.submit(cli=mr_cli)

    sys.exit(int(error))
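
The version check above pivots on the sign of `semver.compare(current_version, desired_version)`: a positive result means the cluster was upgraded and app-interface should be brought up to date via an MR, a negative result is treated as an error. A minimal standalone sketch of that decision (the function name and version values are illustrative, not part of the integration):

import semver

def classify_version_drift(current_version, desired_version):
    # semver.compare returns 1, 0 or -1
    result = semver.compare(current_version, desired_version)
    if result > 0:
        # the cluster reports a newer version, e.g. after an upgrade
        return "update-app-interface"
    if result < 0:
        # app-interface claims a newer version than the cluster reports
        return "error"
    return "in-sync"

print(classify_version_drift("4.8.2", "4.7.19"))   # update-app-interface
print(classify_version_drift("4.7.19", "4.8.2"))   # error
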
コード例 #46
0
ファイル: tests.py プロジェクト: isabella232/python-semver
def test_should_say_equal_versions_are_equal():
    assert compare('2.0.0', '2.0.0') == 0
    assert compare('1.1.9-rc.1', '1.1.9-rc.1') == 0
    assert compare('1.1.9+build.1', '1.1.9+build.1') == 0
    assert compare('1.1.9-rc.1+build.1', '1.1.9-rc.1+build.1') == 0
コード例 #47
0
async def fetch_and_update_release(app, ignore_errors=False):
    """
    Fetch the latest HMM release from GitHub, update the HMM status document (creating it if none exists) and return the release.

    :param app: the app object
    :type app: :class:`aiohttp.web.Application`

    :param ignore_errors: ignore possible errors when making GitHub request
    :type ignore_errors: bool

    """
    db = app["db"]
    settings = app["settings"]
    session = app["client"]

    document = await db.status.find_one("hmm", ["release", "installed"])

    release = document.get("release", None)

    installed = document.get("installed", None)

    try:
        etag = release["etag"]
    except (KeyError, TypeError):
        etag = None

    errors = list()

    try:
        updated = await virtool.github.get_release(
            settings,
            session,
            settings["hmm_slug"],
            etag
        )

        # The release dict will only be replaced if there is a 200 response from GitHub. A 304 indicates the release
        # has not changed and `None` is returned from `get_release()`.
        if updated:
            release = virtool.github.format_release(updated)

            release["newer"] = bool(
                release is None or installed is None or (
                        installed and
                        semver.compare(release["name"].lstrip("v"), installed["name"].lstrip("v")) == 1
                )
            )

        release["retrieved_at"] = virtool.utils.timestamp()

        # The `errors` list is emptied and the release is written to the status document.
        await db.status.update_one({"_id": "hmm"}, {
            "$set": {
                "errors": errors,
                "release": release
            }
        }, upsert=True)

        return release

    except (aiohttp.client_exceptions.ClientConnectorError, virtool.errors.GitHubError) as err:

        if "ClientConnectorError" in str(err):
            errors = ["Could not reach GitHub"]

        if "404" in str(err):
            errors = ["GitHub repository or release does not exist"]

        if errors and not ignore_errors:
            raise

        await db.status.update_one({"_id": "hmm"}, {
            "$set": {
                "errors": errors
            }
        })

        return release
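
The `newer` flag above boils down to comparing the GitHub release tag with the installed version after stripping a leading "v". A minimal sketch of just that comparison (the helper name is illustrative):

import semver

def is_release_newer(release_name, installed_name):
    # tags such as "v1.3.0" must lose the "v" prefix before semver.compare accepts them
    return semver.compare(release_name.lstrip("v"), installed_name.lstrip("v")) == 1

print(is_release_newer("v1.3.0", "v1.2.9"))  # True
print(is_release_newer("v1.2.9", "v1.2.9"))  # False
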
コード例 #48
0
ファイル: tests.py プロジェクト: isabella232/python-semver
def test_should_compare_versions_with_build_and_release():
    assert compare('1.1.9-rc.1', '1.1.9-rc.1+build.1') == 0
    assert compare('1.1.9-rc.1', '1.1.9+build.1') == -1
コード例 #49
0
 def __lt__(self, other):
     return semver.compare(self.value, other.value) == -1
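
Defining only `__lt__` in terms of `semver.compare` is enough to make such objects sortable, since `sorted()` relies solely on `<`. A self-contained sketch with a hypothetical wrapper class:

import semver

class Version:
    def __init__(self, value):
        self.value = value

    def __lt__(self, other):
        return semver.compare(self.value, other.value) == -1

versions = [Version("1.10.0"), Version("1.2.0"), Version("1.9.9")]
print([v.value for v in sorted(versions)])  # ['1.2.0', '1.9.9', '1.10.0']
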
コード例 #50
0
ファイル: tests.py プロジェクト: isabella232/python-semver
def test_should_ignore_builds_on_compare():
    assert compare('1.0.0+build.1', '1.0.0') == 0
    assert compare('1.0.0-alpha.1+build.1', '1.0.0-alpha.1') == 0
    assert compare('1.0.0+build.1', '1.0.0-alpha.1') == 1
    assert compare('1.0.0+build.1', '1.0.0-alpha.1+build.1') == 1
コード例 #51
0
def call(context: Context) -> Result:
    arg_work_branch = context.args.get('<work-branch>')
    if arg_work_branch is None:
        branch_prefix = context.args['<supertype>']
        branch_type = context.args['<type>']
        branch_name = context.args['<name>']

        if branch_prefix is not None or branch_type is not None or branch_name is not None:
            arg_work_branch = repotools.create_ref_name(branch_prefix, branch_type, branch_name)

    command_context = get_command_context(
        context=context,
        object_arg=arg_work_branch
    )

    check_in_repo(command_context)

    base_command_context = get_command_context(
        context=context,
        object_arg=context.args['<base-object>']
    )

    check_requirements(command_context=command_context,
                       ref=command_context.selected_ref,
                       branch_classes=[BranchClass.WORK_DEV, BranchClass.WORK_PROD],
                       modifiable=True,
                       with_upstream=True,  # not context.config.push_to_local
                       in_sync_with_upstream=True,
                       fail_message=_("Version creation failed.")
                       )

    work_branch = None
    selected_ref_match = context.work_branch_matcher.fullmatch(command_context.selected_ref.name)
    if selected_ref_match is not None:
        work_branch = WorkBranch()
        work_branch.prefix = selected_ref_match.group('prefix')
        work_branch.type = selected_ref_match.group('type')
        work_branch.name = selected_ref_match.group('name')
    else:
        if command_context.selected_explicitly:
            context.fail(os.EX_USAGE,
                         _("The ref {branch} does not refer to a work branch.")
                         .format(branch=repr(command_context.selected_ref.name)),
                         None)

    work_branch_info = get_branch_info(command_context, work_branch.local_ref_name())
    if work_branch_info is None:
        context.fail(os.EX_USAGE,
                     _("The branch {branch} does neither exist locally nor remotely.")
                     .format(branch=repr(work_branch.branch_name())),
                     None)

    work_branch_ref, work_branch_class = select_ref(command_context.result,
                                                    work_branch_info,
                                                    BranchSelection.BRANCH_PREFER_LOCAL)

    allowed_base_branch_class = const.BRANCHING[work_branch_class]

    base_branch_info = get_branch_info(base_command_context,
                                       base_command_context.selected_ref)

    base_branch_ref, base_branch_class = select_ref(command_context.result,
                                                    base_branch_info,
                                                    BranchSelection.BRANCH_PREFER_LOCAL)
    if not base_command_context.selected_explicitly:
        if work_branch.prefix == const.BRANCH_PREFIX_DEV:
            base_branch_info = get_branch_info(base_command_context,
                                               repotools.create_ref_name(const.LOCAL_BRANCH_PREFIX,
                                                                         context.config.release_branch_base))
            base_branch_ref, base_branch_class = select_ref(command_context.result,
                                                            base_branch_info,
                                                            BranchSelection.BRANCH_PREFER_LOCAL)
        elif work_branch.prefix == const.BRANCH_PREFIX_PROD:
            # discover closest merge base in release branches

            release_branches = repotools.git_list_refs(context.repo,
                                                       repotools.create_ref_name(const.REMOTES_PREFIX,
                                                                                 context.config.remote_name,
                                                                                 'release'))
            release_branches = list(release_branches)
            release_branches.sort(reverse=True, key=utils.cmp_to_key(lambda ref_a, ref_b: semver.compare(
                context.release_branch_matcher.format(ref_a.name),
                context.release_branch_matcher.format(ref_b.name)
            )))
            for release_branch_ref in release_branches:
                merge_base = repotools.git_merge_base(context.repo, base_branch_ref, work_branch_ref.name)
                if merge_base is not None:
                    base_branch_info = get_branch_info(base_command_context, release_branch_ref)

                    base_branch_ref, base_branch_class = select_ref(command_context.result,
                                                                    base_branch_info,
                                                                    BranchSelection.BRANCH_PREFER_LOCAL)
                    break

    if allowed_base_branch_class != base_branch_class:
        context.fail(os.EX_USAGE,
                     _("The branch {branch} is not a valid base for {supertype} branches.")
                     .format(branch=repr(base_branch_ref.name),
                             supertype=repr(work_branch.prefix)),
                     None)

    if base_branch_ref is None:
        context.fail(os.EX_USAGE,
                     _("Base branch undetermined."),
                     None)

    if context.verbose:
        cli.print("branch_name: " + command_context.selected_ref.name)
        cli.print("work_branch_name: " + work_branch_ref.name)
        cli.print("base_branch_name: " + base_branch_ref.name)

    # check, if already merged
    merge_base = repotools.git_merge_base(context.repo, base_branch_ref, work_branch_ref.name)
    if work_branch_ref.obj_name == merge_base:
        cli.print(_("Branch {branch} is already merged.")
                  .format(branch=repr(work_branch_ref.name)))
        return context.result

    # check for staged changes
    index_status = git(context.repo, ['diff-index', 'HEAD', '--'])
    if index_status == 1:
        context.fail(os.EX_USAGE,
                     _("Branch creation aborted."),
                     _("You have staged changes in your workspace.\n"
                       "Unstage, commit or stash them and try again."))
    elif index_status != 0:
        context.fail(os.EX_DATAERR,
                     _("Failed to determine index status."),
                     None)

    if not context.dry_run and not command_context.has_errors():
        # perform merge
        local_branch_ref_name = repotools.create_local_branch_ref_name(base_branch_ref.name)
        local_branch_name = repotools.create_local_branch_name(base_branch_ref.name)
        if local_branch_ref_name == base_branch_ref.name:
            git_or_fail(context.repo, command_context.result,
                        ['checkout', local_branch_name],
                        _("Failed to checkout branch {branch_name}.")
                        .format(branch_name=repr(base_branch_ref.short_name))
                        )
        else:
            git_or_fail(context.repo, command_context.result,
                        ['checkout', '-b', local_branch_name, base_branch_ref.name],
                        _("Failed to checkout branch {branch_name}.")
                        .format(branch_name=repr(base_branch_ref.short_name))
                        )

        git_or_fail(context.repo, command_context.result,
                    ['merge', '--no-ff', work_branch_ref],
                    _("Failed to merge work branch.\n"
                      "Rebase {work_branch} on {base_branch} and try again")
                    .format(work_branch=repr(work_branch_ref.short_name),
                            base_branch=repr(base_branch_ref.short_name))
                    )

        git_or_fail(context.repo, command_context.result,
                    ['push', context.config.remote_name, local_branch_name],
                    _("Failed to push branch {branch_name}.")
                    .format(branch_name=repr(base_branch_ref.short_name))
                    )

    return context.result
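
The release-branch discovery above sorts refs newest-first by feeding `semver.compare` through `cmp_to_key`. A minimal sketch of the same pattern on plain branch names (assuming names of the form release/<version>; everything here is illustrative):

from functools import cmp_to_key

import semver

branches = ["release/1.2.0", "release/1.10.0", "release/1.9.1"]

def version_of(branch):
    # take everything after the first "/" as the semver string
    return branch.split("/", 1)[1]

branches.sort(
    reverse=True,
    key=cmp_to_key(lambda a, b: semver.compare(version_of(a), version_of(b))),
)
print(branches)  # ['release/1.10.0', 'release/1.9.1', 'release/1.2.0']
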
コード例 #52
0
def test_should_get_less(left, right):
    assert compare(left, right) == -1
コード例 #53
0
def test_pytest():
    import pytest
    assert semver.compare(pytest.__version__, '3.5.0') == 1
コード例 #54
0
 def _is_server_version_greater(self, checked_version):
     return semver.compare(self.server_version, checked_version) >= 0
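
A usage sketch of the same version gate; the `Client` class and the version numbers are illustrative, not from the original project:

import semver

class Client:
    def __init__(self, server_version):
        self.server_version = server_version

    def _is_server_version_greater(self, checked_version):
        return semver.compare(self.server_version, checked_version) >= 0

client = Client("2.5.0")
print(client._is_server_version_greater("2.4.0"))  # True
print(client._is_server_version_greater("3.0.0"))  # False
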
コード例 #55
0
ファイル: tests.py プロジェクト: ofek/python-semver
def test_should_compare_prerelease_with_numbers_and_letters():
    v1 = VersionInfo(major=1, minor=9, patch=1, prerelease='1unms', build=None)
    v2 = VersionInfo(major=1, minor=9, patch=1, prerelease=None, build='1asd')
    assert v1 < v2
    assert compare("1.9.1-1unms", "1.9.1+1") == -1
コード例 #56
0
def run(dry_run, gitlab_project_id=None, thread_pool_size=10):
    settings = queries.get_app_interface_settings()
    clusters = queries.get_clusters()
    clusters = [c for c in clusters if c.get('ocm') is not None]
    ocm_map = OCMMap(clusters=clusters, integration=QONTRACT_INTEGRATION,
                     settings=settings)
    current_state, pending_state = ocm_map.cluster_specs()
    desired_state = fetch_current_state(clusters)

    if not dry_run:
        mr_cli = mr_client_gateway.init(gitlab_project_id=gitlab_project_id)

    error = False
    clusters_updates = {}
    for cluster_name, desired_spec in desired_state.items():
        current_spec = current_state.get(cluster_name)
        if current_spec:
            clusters_updates[cluster_name] = {}
            cluster_path = 'data' + \
                [c['path'] for c in clusters
                 if c['name'] == cluster_name][0]

            # validate version
            desired_spec['spec'].pop('initial_version')
            desired_version = desired_spec['spec'].pop('version')
            current_version = current_spec['spec'].pop('version')
            compare_result = 1  # default value in case version is empty
            if desired_version:
                compare_result = \
                    semver.compare(current_version, desired_version)
            if compare_result > 0:
                # current version is larger due to an upgrade.
                # submit MR to update cluster version
                logging.info(
                    '[%s] desired version %s is different ' +
                    'from current version %s. ' +
                    'version will be updated automatically in app-interface.',
                    cluster_name, desired_version, current_version)
                clusters_updates[cluster_name]['version'] = current_version
            elif compare_result < 0:
                logging.error(
                    '[%s] desired version %s is different ' +
                    'from current version %s',
                    cluster_name, desired_version, current_version)
                error = True

            if not desired_spec['spec'].get('id'):
                clusters_updates[cluster_name]['id'] = \
                    current_spec['spec']['id']

            if not desired_spec['spec'].get('external_id'):
                clusters_updates[cluster_name]['external_id'] = \
                    current_spec['spec']['external_id']

            desired_provision_shard_id = \
                desired_spec['spec'].get('provision_shard_id')
            current_provision_shard_id = \
                current_spec['spec']['provision_shard_id']
            if desired_provision_shard_id != current_provision_shard_id:
                clusters_updates[cluster_name]['provision_shard_id'] = \
                    current_provision_shard_id

            if clusters_updates[cluster_name]:
                clusters_updates[cluster_name]['path'] = cluster_path

            # exclude params we don't want to check in the specs
            for k in ['id', 'external_id', 'provision_shard_id']:
                current_spec['spec'].pop(k, None)
                desired_spec['spec'].pop(k, None)

            # validate specs
            if current_spec != desired_spec:
                logging.error(
                    '[%s] desired spec %s is different ' +
                    'from current spec %s',
                    cluster_name, desired_spec, current_spec)
                error = True
        else:
            # create cluster
            if cluster_name in pending_state:
                continue
            logging.info(['create_cluster', cluster_name])
            ocm = ocm_map.get(cluster_name)
            ocm.create_cluster(cluster_name, desired_spec, dry_run)

    create_update_mr = False
    for cluster_name, cluster_updates in clusters_updates.items():
        for k, v in cluster_updates.items():
            if k == 'path':
                continue
            logging.info(
                f"[{cluster_name}] desired key " +
                f"{k} will be updated automatically " +
                f"with value {v}."
            )
            create_update_mr = True
    if create_update_mr and not dry_run:
        mr = CreateClustersUpdates(clusters_updates)
        mr.submit(cli=mr_cli)

    if error:
        sys.exit(1)
コード例 #57
0
def test_should_ignore_builds_on_compare(left, right, expected):
    assert compare(left, right) == expected
コード例 #58
0
ファイル: tests.py プロジェクト: ofek/python-semver
def test_should_get_more_rc1():
    assert compare("1.0.0-rc1", "1.0.0-rc0") == 1
def search_dep(dependency_name, dependency_version_range, direct_dep, level,
               times):

    global global_search_list
    global targetFramework
    global irregularity_list

    if level > 6:
        return
    level += 1
    if "(" in dependency_version_range:
        irregularity_list.append(dependency_name + "@" +
                                 dependency_version_range)

    # resolve the version that actually matches the range
    dependency_version = matching_version_from_dict(dependency_name,
                                                    dependency_version_range)
    children_dependencies_list, matched_framework = get_dep_info(
        dependency_name, dependency_version, direct_dep)

    children_name_list = []
    for children_dependency in children_dependencies_list:
        children_name_list.append(children_dependency[0])

    temp_search_info = []
    for search_info in global_search_list:
        ser_dep_name = search_info[0]

        if dependency_name == ser_dep_name:
            temp_search_info = search_info
            break

    # a dependency with the same name has already been processed; check for conflicts
    if temp_search_info != []:
        installed_dep_name = temp_search_info[0]
        installed_dep_version = temp_search_info[1]
        installed_dep_version_range = temp_search_info[2]
        installed_dep_children_name_list = temp_search_info[3]
        installed_dep_direct_dep = temp_search_info[4]

        # check whether the two versions' ranges intersect
        # check whether the installed version is within the new dependency range
        check_installed_version = matching_version.check_verison_in_versionrange(
            installed_dep_version, dependency_version_range)

        if check_installed_version == "":
            # check whether the new version is within the installed dependency range
            check_dependency_version = matching_version.check_verison_in_versionrange(
                dependency_version, installed_dep_version_range)
            if check_dependency_version != "":
                # print("有交集,需要回溯")

                # uninstall the installed dependency and all of its children
                uninstall_dep_and_all_children(dependency_name)

                new_install_dep_list = []
                new_install_dep_list.append(dependency_name)
                new_install_dep_list.append(dependency_version)
                new_install_dep_list.append(dependency_version_range)
                new_install_dep_list.append(children_name_list)
                new_install_dep_list.append(direct_dep)
                new_install_dep_list.append(matched_framework)

                global_search_list.append(new_install_dep_list)
                # recurse into child dependencies
                for children_dependency in children_dependencies_list:
                    children_dependency_name = children_dependency[0]
                    children_dependency_version_range = children_dependency[1]
                    if children_dependency_name != '' and children_dependency_name is not None:
                        search_dep(children_dependency_name,
                                   children_dependency_version_range,
                                   direct_dep, level, times)

            else:
                if semver.compare(dependency_version,
                                  installed_dep_version) == -1:
                    # print(dependency_version, installed_dep_version)

                    # the lower version is a direct dependency
                    if dependency_name + "@" + dependency_version == direct_dep:
                        str_print(
                            "*****************************************************************"
                        )
                        str_print("error! NU1605 直接依赖版本版本低于间接依赖" +
                                  dependency_name)
                        str_print("直接版本:" + dependency_version)
                        str_print("间接依赖范围:" + installed_dep_version_range)

                        # add to the conflict list
                        if direct_dep + "_" + installed_dep_version not in error_directly_dependency_list:
                            error_directly_dependency_list.append(
                                direct_dep + "_" + installed_dep_version)

                    # the lower version is a transitive dependency
                    elif dependency_name + "@" + installed_dep_version == installed_dep_direct_dep:
                        # print("*****************************************************************")
                        # print("error! NU1608 直接依赖版本高于间接依赖,但无交集", dependency_name)
                        # print("直接版本:", installed_dep_version)
                        # print("间接依赖范围:", dependency_version_range)

                        # add to the conflict list
                        if direct_dep not in error_directly_dependency_list:
                            error_directly_dependency_list.append(direct_dep)

                    # both the higher and lower versions are transitive dependencies
                    else:
                        str_print(
                            "*****************************************************************"
                        )
                        str_print("error! NU1107 间接依赖之间无交集" + dependency_name)
                        str_print("已安装版本:" + installed_dep_version)
                        str_print("待检测依赖范围:" + dependency_version_range)
                        str_print("冲突间接依赖范围:" + installed_dep_version_range)

                        # add to the conflict list
                        if direct_dep not in error_directly_dependency_list:
                            error_directly_dependency_list.append(direct_dep)
                else:
                    # 5.0.0 3.1.8
                    # print(dependency_version, installed_dep_version)

                    # the lower version is a direct dependency
                    if dependency_name + "@" + installed_dep_version == installed_dep_direct_dep:
                        str_print(
                            "*****************************************************************"
                        )
                        str_print("error! NU1605 直接依赖版本版本低于间接依赖" +
                                  dependency_name)
                        str_print("直接版本:" + installed_dep_version)
                        str_print("间接依赖范围:" + dependency_version_range)

                        # add to the conflict list
                        if installed_dep_direct_dep + "_" + dependency_version not in error_directly_dependency_list:
                            error_directly_dependency_list.append(
                                installed_dep_direct_dep + "_" +
                                dependency_version)

                    # the lower version is a transitive dependency
                    elif dependency_name + "@" + dependency_version == direct_dep:
                        # print("*****************************************************************")
                        # print("error! NU1608 直接依赖版本高于间接依赖,但无交集",dependency_name)
                        # print("直接版本:",dependency_version)
                        # print("间接依赖范围:", installed_dep_version_range)

                        # add to the conflict list
                        if installed_dep_direct_dep not in error_directly_dependency_list:
                            error_directly_dependency_list.append(
                                installed_dep_direct_dep)

                    # both the higher and lower versions are transitive dependencies
                    else:
                        str_print(
                            "*****************************************************************"
                        )
                        str_print("error! NU1107 间接依赖之间无交集" + dependency_name)
                        str_print("已安装版本:" + installed_dep_version)
                        str_print("待检测依赖范围:" + dependency_version_range)
                        str_print("冲突间接依赖范围:" + installed_dep_version_range)

                        # add to the conflict list
                        if installed_dep_direct_dep not in error_directly_dependency_list:
                            error_directly_dependency_list.append(
                                installed_dep_direct_dep)

        # the installed version already satisfies the target range; nothing to install
        elif times == 1 and level < 10:
            # recurse into children to guard against over-removal
            for children_dependency in children_dependencies_list:
                children_dependency_name = children_dependency[0]
                children_dependency_version_range = children_dependency[1]
                if children_dependency_name != '' and children_dependency_name is not None:
                    search_dep(children_dependency_name,
                               children_dependency_version_range, direct_dep,
                               level, times)

    # no installed dependency with this name; install normally
    else:
        new_install_dep_list = []
        new_install_dep_list.append(dependency_name)
        new_install_dep_list.append(dependency_version)
        new_install_dep_list.append(dependency_version_range)
        new_install_dep_list.append(children_name_list)
        new_install_dep_list.append(direct_dep)
        new_install_dep_list.append(matched_framework)

        global_search_list.append(new_install_dep_list)
        # recurse into child dependencies
        for children_dependency in children_dependencies_list:
            children_dependency_name = children_dependency[0]
            children_dependency_version_range = children_dependency[1]
            if children_dependency_name != '' and children_dependency_name is not None:
                search_dep(children_dependency_name,
                           children_dependency_version_range, direct_dep,
                           level, times)
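
At its core, the conflict handling above asks which of two resolved versions of the same package is lower, and whether that lower version came in as a direct dependency. A simplified sketch of that classification (it collapses the commented-out NU1608 case and uses illustrative names):

import semver

def classify_conflict(installed_version, candidate_version,
                      installed_is_direct, candidate_is_direct):
    # the lower of the two versions determines the error code
    if semver.compare(candidate_version, installed_version) == -1:
        lower_is_direct = candidate_is_direct
    else:
        lower_is_direct = installed_is_direct
    # NU1605: a direct dependency resolves lower than a transitive one
    # NU1107: two transitive dependencies cannot be reconciled
    return "NU1605" if lower_is_direct else "NU1107"

print(classify_conflict("3.1.8", "5.0.0", True, False))   # NU1605
print(classify_conflict("3.1.8", "5.0.0", False, False))  # NU1107
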
コード例 #60
0
async def fetch_and_update_releases(app, ignore_errors=False):
    """
    Get a list of releases, from the Virtool website, published since the current server version.

    :param app: the application object

    :param ignore_errors: ignore errors during request to virtool.ca
    :type ignore_errors: bool

    :return: a list of releases
    :rtype: Coroutine[list]

    """
    db = app["db"]
    version = app["version"]
    session = app["client"]
    settings = app["settings"]

    if app is None or version == "Unknown":
        return list()

    try:
        async with virtool.http.proxy.ProxyRequest(
                settings, session.get, VIRTOOL_RELEASES_URL) as resp:
            data = await resp.text()
            data = json.loads(data)

        logger.debug("Retrieved software releases from www.virtool.ca")
    except aiohttp.ClientConnectorError:
        # Return any existing release list or `None`.
        logger.debug("Could not retrieve software releases")

        await db.status.update_one(
            {"_id": "software"},
            {"$set": {
                "errors": ["Could not retrieve software releases"]
            }})

        if not ignore_errors:
            raise

        return await virtool.db.utils.get_one_field(db.status, "releases",
                                                    "software")

    data = data["virtool"]

    channel = settings["software_channel"]

    # Reformat the release dicts to make them more palatable. If the response code was not 200, the releases list
    # will be empty. This is interpreted by the web client as an error.
    if channel == "stable":
        data = [
            r for r in data
            if "alpha" not in r["name"] and "beta" not in r["name"]
        ]

    elif channel == "beta":
        data = [r for r in data if "alpha" not in r["name"]]

    releases = list()

    for release in data:
        if semver.compare(release["name"].replace("v", ""),
                          version.replace("v", "")) < 1:
            break

        releases.append(release)

    await db.status.update_one({"_id": "software"},
                               {"$set": {
                                   "errors": [],
                                   "releases": releases
                               }})

    return releases
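
The loop above assumes the Virtool release list arrives newest-first and stops at the first release that is not newer than the running version. A minimal sketch of that filter (the release data here is made up):

import semver

def newer_releases(releases, current_version):
    newer = []
    for release in releases:
        if semver.compare(release["name"].replace("v", ""),
                          current_version.replace("v", "")) < 1:
            break
        newer.append(release)
    return newer

releases = [{"name": "v3.2.1"}, {"name": "v3.2.0"}, {"name": "v3.1.0"}]
print([r["name"] for r in newer_releases(releases, "v3.1.0")])  # ['v3.2.1', 'v3.2.0']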