def test_should_compare_prerelease_with_numbers_and_letters():
    """A version with a prerelease sorts before the same version with only build metadata."""
    with_prerelease = VersionInfo(major=1, minor=9, patch=1, prerelease="1unms", build=None)
    with_build_only = VersionInfo(major=1, minor=9, patch=1, prerelease=None, build="1asd")
    assert with_prerelease < with_build_only
    # Same ordering holds when comparing from version strings.
    assert compare("1.9.1-1unms", "1.9.1+1") == -1
def test_should_be_able_to_use_integers_as_prerelease_build():
    """Integer prerelease/build arguments are coerced to strings and compare equal to them."""
    version = VersionInfo(1, 2, 3, 4, 5)
    assert isinstance(version.prerelease, str)
    assert isinstance(version.build, str)
    # Integer and string forms of the same prerelease/build are equal.
    assert VersionInfo(1, 2, 3, 4, 5) == VersionInfo(1, 2, 3, "4", "5")
def _compare_semver(version_1: str, version_2: str) -> bool:
    """Determine if Version 1 is greater than Version 2."""
    parsed_1 = VersionInfo.parse(version_1)
    parsed_2 = VersionInfo.parse(version_2)
    return parsed_1 > parsed_2  # type: ignore
def parse(cls, version, **kwargs):
    """Build an instance from a version string, forwarding any extra keyword arguments."""
    fields = SemVer.parse(version).to_dict()
    return cls(**fields, **kwargs)
async def all_ports() -> Iterable[port.Port]:
    """Enumerate every known port by querying each upstream GitHub repository.

    All enumerations run concurrently via ``util.wait_all`` and the resulting
    per-repository iterables are flattened into a single iterable of ports.
    """
    return itertools.chain.from_iterable(await util.wait_all((
        auto.enumerate_simple_github(
            owner='zajo',
            repo='leaf',
            package_name='boost.leaf',
            library_name='leaf',
            namespace='boost',
        ),
        auto.enumerate_simple_github(
            owner='boostorg',
            repo='mp11',
            namespace='boost',
            package_name='boost.mp11',
            library_name='mp11',
        ),
        auto.enumerate_simple_github(
            owner='apolukhin',
            repo='magic_get',
            namespace='boost',
            package_name='boost.pfr',
            # NOTE(review): 'pft' looks like a typo for 'pfr' (package is
            # boost.pfr) — confirm against consumers of this library name.
            library_name='pft',
        ),
        auto.enumerate_simple_github(
            owner='hanickadot',
            repo='compile-time-regular-expressions',
            min_version=VersionInfo(2, 8, 1),
            package_name='ctre',
            namespace='hanickadot',
            library_name='ctre',
        ),
        auto.enumerate_simple_github(
            owner='fmtlib',
            repo='fmt',
            namespace='fmt',
            min_version=VersionInfo(6),
        ),
        auto.enumerate_simple_github(
            owner='Neargye',
            repo='magic_enum',
            package_name='magic_enum',
            namespace='neargye',
            library_name='magic_enum',
        ),
        auto.enumerate_simple_github(
            owner='Neargye',
            repo='nameof',
            package_name='nameof',
            namespace='neargye',
            library_name='nameof',
        ),
        auto.enumerate_simple_github(
            owner='marzer',
            repo='tomlplusplus',
            namespace='tomlpp',
            package_name='tomlpp',
            library_name='tomlpp',
        ),
        auto.enumerate_simple_github(
            owner='ericniebler',
            repo='range-v3',
            package_name='range-v3',
            namespace='range-v3',
            library_name='range-v3',
        ),
        auto.enumerate_simple_github(
            owner='nlohmann',
            repo='json',
            min_version=VersionInfo(3, 5, 0),
            package_name='nlohmann-json',
            namespace='nlohmann',
            library_name='json',
        ),
        auto.enumerate_simple_github(
            owner='vector-of-bool',
            repo='wil',
            package_name='ms-wil',
            namespace='microsoft',
            library_name='wil',
        ),
        auto.enumerate_simple_github(
            owner='taocpp',
            repo='PEGTL',
            package_name='pegtl',
            namespace='tao',
            library_name='pegtl',
            min_version=VersionInfo(2, 6, 0),
            # Repository keeps headers under src/; strip that level.
            fs_transform=_remove_src,
        ),
        auto.enumerate_simple_github(
            owner='pantor',
            repo='inja',
            package_name='inja',
            namespace='inja',
            library_name='inja',
            # NOTE(review): '[email protected]' looks like a scrubbed/garbled
            # dependency string (likely name@version) — verify the real value.
            depends=['[email protected]'],
            min_version=VersionInfo(2, 1, 0),
            uses=['nlohmann/json'],
        ),
        auto.enumerate_simple_github(
            owner='USCiLab',
            repo='cereal',
            package_name='cereal',
            namespace='cereal',
            library_name='cereal',
            min_version=VersionInfo(0, 9, 0),
        ),
        auto.enumerate_simple_github(
            owner='pybind',
            repo='pybind11',
            package_name='pybind11',
            namespace='pybind',
            library_name='pybind11',
            min_version=VersionInfo(2, 0, 0),
        ),
        auto.enumerate_simple_github(
            owner='imneme',
            repo='pcg-cpp',
            package_name='pcg-cpp',
            namespace='pcg',
            library_name='pcg-cpp',
            min_version=VersionInfo(0, 98, 1),
        ),
        auto.enumerate_simple_github(
            owner='HowardHinnant',
            repo='date',
            package_name='hinnant-date',
            namespace='hinnant',
            library_name='date',
            min_version=VersionInfo(2, 4, 1),
            fs_transform=_remove_src,
        ),
        auto.enumerate_simple_github(
            owner='lua',
            repo='lua',
            namespace='lua',
            min_version=VersionInfo(5, 1, 1),
            # Lua's repo keeps sources at the top level; relocate them.
            fs_transform=move_sources_into_src,
        ),
        auto.enumerate_simple_github(
            owner='ThePhD',
            repo='sol2',
            namespace='sol2',
            min_version=VersionInfo(3),
            # NOTE(review): another apparently scrubbed dependency string.
            depends=['[email protected]'],
            uses=['lua/lua'],
        ),
        auto.enumerate_simple_github(
            owner='gabime',
            repo='spdlog',
            namespace='spdlog',
            # NOTE(review): another apparently scrubbed dependency string.
            depends=['[email protected]'],
            uses=['fmt/fmt'],
            min_version=VersionInfo(1, 4, 0),
            fs_transform=fixup_spdlog,
        ),
        auto.enumerate_simple_github(
            owner='soasis',
            repo='text',
            package_name='ztd.text',
            namespace='ztd',
            library_name='text',
        ),
    )))
def parse_java_version_string(version_string: str) -> Optional[VersionInfo]:
    """Parse a Java version string, falling back to the lenient parser on failure."""
    try:
        parsed = VersionInfo.parse(version_string)
    except ValueError:
        # Not strict semver — try the Java-specific alternative format instead.
        return parse_alternative_java_version_numbers(version_string)
    return parsed
def __init__(self, version: str):
    # NOTE(review): str.__init__(version) is effectively a no-op — str is
    # immutable and gets its value in __new__, and this call doesn't even pass
    # `self`. Presumably the class subclasses str and overrides __new__
    # elsewhere; confirm, otherwise this line has no effect.
    str.__init__(version)
    # Parsed semantic-version fields for the wrapped version string.
    self._info = VersionInfo.parse(version)
def __init__(self, version: str = "0.0.1", *args, **kwargs):
    """Initialise the mixin with a starting semantic version (default "0.0.1")."""
    super(SemanticVersionerMixin, self).__init__(*args, **kwargs)
    # No digest has been recorded yet and no history exists.
    self._last_digest = None
    self._version_history = {}
    self._version = Version.parse(version)
def main(ids, arguments, nointeraction=False):
    """Build the project described by ./sailboat.toml.

    ids: positional CLI tokens; ids[1] (if present) selects the version/bump.
    arguments: list of (flag, value) tuples from the CLI parser.
    nointeraction: when True, skip confirmation prompts (and abort on the
    downgrade warning).

    Side effects: rewrites sailboat.toml, moves source files, writes setup.py,
    runs pip/pyinstaller/dmgbuild/setup.py via os.system.
    """
    # ============== Get Data ===============================================
    if not os.path.isfile('.' + os.sep + 'sailboat.toml'):
        print('Please create a config file with `sailboat wizard` first.')
        sys.exit(0)
    try:
        data = toml.loads(open('.' + os.sep + 'sailboat.toml').read())
    except toml.decoder.TomlDecodeError as e:
        print('Config error:\n\t' + str(e))
        exit()
    # ============== Get VersionInfo ===============================================
    if 'latest_build' not in data:
        data['latest_build'] = '0.0.1'
    if len(ids) >= 2:  # Something provided
        if VersionInfo.isvalid(ids[1]):
            # An explicit, valid semver string was given — use it verbatim.
            version = ids[1]
        elif ids[1].startswith('maj'):
            version = str(VersionInfo.parse(data['latest_build']).bump_major())
        elif ids[1].startswith('min'):
            version = str(VersionInfo.parse(data['latest_build']).bump_minor())
        elif ids[1].startswith('pat'):
            version = str(VersionInfo.parse(data['latest_build']).bump_patch())
        elif ids[1].startswith('pre') or ids[1].startswith('dev'):
            version = str(VersionInfo.parse(data['latest_build']).bump_prerelease())
        else:
            print('Unknown version `{}`'.format(ids[1]))
            sys.exit(0)
    else:
        # No version argument: derive a build tag from the current git commit.
        try:
            latestcommit = os.popen('git rev-parse --short HEAD').read().replace('\n', '')
        except KeyboardInterrupt:
            latestcommit = "build"
        if latestcommit in data['latest_build']:
            # Same commit as last build — just bump the build counter.
            version = str(VersionInfo.parse(data['latest_build']).bump_build())
        else:
            version = str(VersionInfo.parse(data['latest_build']).replace(build=latestcommit + ".1"))
    # NOTE(review): ids[1] is read here unconditionally — if len(ids) < 2 (the
    # git-commit path above) this raises IndexError. Confirm callers always
    # pass at least two ids, or guard this access.
    if compare(version, data['latest_build']) == -1 and not (ids[1].startswith('pre') or ids[1].startswith('dev')):
        if input(f'\u001b[31mYou are building a version ({version}) that comes before the previously built version ({data["latest_build"]}). Do you wish to continue? [y/n] \u001b[0m')[0] == 'n' or nointeraction:
            print()
            sys.exit(0)
    print('\nPreparing to build version {}\n'.format(version))
    # ============== Pre-build script ===============================================
    if 'build_script' in data['build']:
        # Import the user's build script as a module (path minus the .py suffix).
        try:
            buildscript = __import__(data['build']['build_script'].replace('.py', ''))
        except BaseException as e:
            print('Error with custom prebuild script:\n' + str(e))
            pass
        try:
            # pre() may return a replacement config dict.
            newdata = buildscript.pre(version, data)
            if isinstance(newdata, dict):
                data = newdata
        except BaseException as e:
            print('Error with custom prebuild script:\n\t`' + str(e) + "`")
            pass
    # ============== Show what will happen ===============================================
    print('This command will build the following:')
    print('\t- Generate a correct directory structure, setup.py, .gitignore, etc...')
    only = False
    for x in arguments:
        if '-only' in x[0]:
            only = True
            break
    dopypi, dobrew, domac, dowin, doact, doset = False, False, False, False, False, False
    if only:
        # Exactly one target selected via a --*-only flag.
        if ('--pypi-only', '') in arguments:
            dopypi = True
        elif ('--homebrew-only', '') in arguments:
            dobrew = True
        elif ('--windows-only', '') in arguments and sys.platform.startswith('win'):
            dowin = True
        elif ('--mac-only', '') in arguments and sys.platform.startswith('darwin'):
            domac = True
        elif ('--actions-only', '') in arguments:
            doact = True
        elif ('--setup-only', '') in arguments:
            doset = True
        elif ('--unix-only', '') in arguments:
            donix = True
    elif 'actions_only' in data['build'] and data['build']['actions_only'] and not ('CI' in os.environ and os.environ['CI'] == "TRUE"):
        # actions_only config outside CI: build everything except native apps.
        dopypi = True
        dobrew = True
        dowin = False
        domac = False
        donix = False
        doact = True
        doset = True
    else:
        # Default: honour the config, gated by the current platform.
        dopypi = True
        dobrew = data['build']['homebrew']
        dowin = data['build']['windows'] and sys.platform.startswith('win')
        domac = data['build']['mac'] and sys.platform.startswith('darwin')
        donix = data['build']['unix'] and sys.platform.startswith('l')
        doact = data['build']['actions']
        doset = True
    # NOTE(review): donix is only assigned on some paths above; the `if donix:`
    # below can raise NameError (e.g. only-mode without --unix-only). Confirm.
    if ('--no-installer', '') in arguments:
        doinstall = False
    else:
        doinstall = data['build']['installer']
    if dopypi:
        print('\t- A distributable Python module.')
    if dobrew:
        print('\t- A Homebrew formula.')
    if dowin:
        installer = " with a .msi installer." if doinstall else "."
        print('\t- A Windows app' + installer)
    if domac:
        installer = " with a .dmg installer." if doinstall else "."
        print('\t- A Mac app' + installer)
    if donix:
        print('\t- A Unix executable')
    if doact:
        print('\t- A GitHub Actions file for building Mac and Windows apps and publishing a Github release.')
    if not nointeraction:
        input('Press enter to continue.')
    # ============== Create bin Script ===============================================
    try:
        os.mkdir('bin')
    except:
        pass
    bins = []
    for commandname in data['build']['commands'].keys():
        if data['build']['commands'][commandname] == '':
            # Empty command value means "launch the package itself".
            open("bin" + os.sep + commandname, 'w+').write(f"#!" + os.sep + "usr" + os.sep + "bin" + os.sep + f"env bash\npython3 -m {data['short_name']} $@")
            bins.append('bin' + os.sep + commandname)
    # ============== Get module names ===============================================
    if 'no_import' not in data['resources']:
        data['resources']['no_import'] = []
    mods = []
    for x in glob.glob(data['short_name'] + os.sep + '*.py'):
        f = open(x)
        # NOTE(review): this regex looks mangled (`\S+?:` / `as` clause), and
        # with a single capture group findall returns strings, so x[1] below
        # indexes a character, not a group. Verify against the intended
        # import-scanning behaviour.
        mods += re.findall('(?m)(?:from[ ]+(\S+)[ ]+)?import[ ]+\S+?:[ ]+as[ ]+\S+?[ ]*$', f.read())
        f.close()
    modules = []
    for x in mods:
        modules.append(x[1].split('.')[0])
    for module in set(modules):
        if module not in data['resources']['no_import'] and (module != data['short_name'] and module not in sys.builtin_module_names and module not in data['resources']['modules']):
            # Unknown name: treat it as a PyPI dependency iff PyPI knows it.
            print('Checking for {} on PyPi...'.format(module))
            response = requests.get("https://pypi.python.org/pypi/{}/json".format(module))
            if response.status_code == 200:
                data['resources']['modules'].append(module)
            else:
                data['resources']['no_import'].append(module)
    # ============== Generate setup.py ===============================================
    print('\n\n\u001b[4m\u001b[1;36mGenerating setup.py\u001b[0m')
    if doset:
        if 'custom_setup' in data:
            cu = str(data['custom_setup'])
        else:
            cu = str({})
        with open(prefix + os.sep + 'setup.py.template') as datafile:
            template = datafile.read()
        # console_scripts entry points: "command=package.module:function".
        entries = []
        for commandname in data['build']['commands'].keys():
            if data['build']['commands'][commandname] != "":
                modname = ".".join(data['build']['commands'][commandname].split('.')[:-1])
                funcname = data['build']['commands'][commandname].split('.')[-1]
                entries.append(commandname + "=" + data["short_name"] + "." + modname + ":" + funcname)
        # PyPI rejects build metadata (+...); strip it for the package version.
        try:
            pyv = version.split('+')[0]
        except:
            pyv = version
        setup = template.format(
            **data,
            **data['resources'],
            cu=cu,
            bins=bins,
            version=pyv,
            entry_points=entries
        )
        open('setup.py', 'w+').write(setup)
    # ============== Generate directory structure ===============================================
    print('\n\n\u001b[4m\u001b[1;36mGenerating Directory Structure\u001b[0m')
    if not os.path.isfile('.gitignore'):
        open('.' + os.sep + '.gitignore', 'w+').write(open(prefix + os.sep + 'gitignore.template').read().replace('/', os.sep))
    source_dir = os.getcwd()
    target_dir = data["short_name"] + os.sep
    types = ('*.py', *data['resources']["data_files"])
    file_names = []
    for files in types:
        file_names.extend(glob.glob(files))
    if not os.path.isdir(target_dir):
        os.mkdir(target_dir)
    try:
        bs = data['build']['build_script']
    except:
        # Sentinel that will never match a real filename below.
        bs = "RANDOM-----edfskjsdhflkjdhflksdjhflkasjdhflkasjdhflkasjdhflkajsdhflkjadshf"
    for file_name in file_names:
        if file_name in ("setup.py", "sailboat.toml", bs):
            continue
        shutil.move(os.path.join(source_dir, file_name), target_dir + os.sep + file_name)
    for filename in glob.glob(target_dir + os.sep + 'LICE*'):
        shutil.copyfile(filename, 'LICENSE')
    if not os.path.isfile(target_dir + '__init__.py'):
        open(target_dir + '__init__.py', 'w+').write('# This file must exist, empty or not')
    if data['resources']['file'] != "" and not os.path.isfile(data['short_name'] + os.sep + '__main__.py'):
        # Promote the configured entry file to package __main__.py and leave
        # a breadcrumb in its place.
        try:
            os.rename(data['short_name'] + os.sep + data['resources']['file'], data['short_name'] + os.sep + '__main__.py')
            open(data['short_name'] + os.sep + data['resources']['file'], 'w+').write('# Please edit __main__.py for the main code. Thanks!\n(you can delete this file.)')
        except FileNotFoundError:
            pass
    # ============== Generate pypi files ===============================================
    print('\n\n\u001b[4m\u001b[1;36mGenerating PyPi files...\u001b[0m')
    if dopypi:
        try:
            shutil.rmtree('dist')
        except:
            pass
        os.system('python3 .' + os.sep + 'setup.py bdist_wheel sdist -d dist' + os.sep + 'pypi')
        try:
            shutil.rmtree('build')
        except:
            pass
        # bdist_wheel drops wheels in dist/ directly; move them under dist/pypi/.
        for x in glob.glob('dist' + os.sep + '*.whl'):
            os.rename(x, x.replace('dist' + os.sep, 'dist' + os.sep + 'pypi' + os.sep))
        for x in glob.glob('*.egg-info'):
            shutil.rmtree(x)
    # ============== Generate homebrew file ===============================================
    print('\n\n\u001b[4m\u001b[1;36mGenerating Homebrew file...\u001b[0m')
    if dobrew:
        # Ruby `resource` stanza template for each Python dependency.
        retmp = ' resource "{name}" do\n url "{url}"\n sha256 "{sha256}"\n end\n'
        resources = ''
        for modulename in data['resources']['modules']:
            req = requests.get('https://pypi.org/pypi/{}/json'.format(modulename)).json()
            versionPy = req['info']['version']
            url = req['releases'][versionPy][0]['url']
            sha256 = req['releases'][versionPy][0]['digests']['sha256']
            if not (url.endswith('.tar.gz') or url.endswith('.zip')):
                # First artifact is a wheel; Homebrew needs a source archive.
                try:
                    url = req['releases'][versionPy][1]['url']
                    sha256 = req['releases'][versionPy][1]['digests']['sha256']
                except:
                    continue
            resources += retmp.format(name=modulename, url=url, sha256=sha256)
        os.makedirs('dist' + os.sep + 'homebrew')
        f = open('dist' + os.sep + 'homebrew' + os.sep + '{name}.rb'.format(name=data['name']), 'w+')
        f.write(open(prefix + os.sep + 'brew.rb').read().format(
            **data,
            resources2=resources,
            version=version
        ))
        f.close()
    # ============== Generate w/Pyinstaller ===============================================
    print('\n\n\u001b[4m\u001b[1;36mGenerating Pyinstaller files...\u001b[0m')
    # domac = True
    if dowin or domac or donix:
        try:
            import PyInstaller.__main__
        except:
            print('Installing PyInstaller...')
            os.system('pip3 install pyinstaller')
            import PyInstaller.__main__
        mods = []
        for x in data['resources']['modules']:
            mods.append('--hidden-import')
            mods.append(x)
        datafiles = []
        for x in data['resources']['data_files']:
            for g in glob.glob(data['short_name'] + os.sep + x):
                datafiles.append('--add-data')
                datafiles.append(g + os.pathsep + g.replace(data['short_name'] + os.sep, ''))
        typ = '--nowindowed' if data['build']['type'] == '1' else '--noconsole'
        ico = ['--icon', data['resources']['icon']] if 'icon' in data['resources'] else []
        options = [
            data['short_name'] + os.sep + '__main__.py',
            '--onefile',
            '--name', data['name'] if ('-n', '') not in arguments else 'app',
            '--distpath', '.' + os.sep + 'dist' + os.sep + 'pyinstaller',
            *mods,
            *datafiles,
            typ,
            *ico,
            '--osx-bundle-identifier', data['build']['bundle_id']
        ]
        print(options)
        PyInstaller.__main__.run(options)
        # PyInstaller leaves a .spec file behind; clean it up.
        try:
            print('removing ' + data['name'] + ".spec...")
            os.remove(data['name'] + ".spec")
        except:
            print('removing app.spec...')
            os.remove("app.spec")
    # ============== Mac .app Bundle ===============================================
    print('\n\n\u001b[4m\u001b[1;36mGenerating Mac .app bundle...\u001b[0m')
    if domac:
        # Assemble the standard Contents/{MacOS,Resources} bundle layout.
        os.chdir('dist')
        os.mkdir(data['name'])
        os.chdir(data['name'])
        os.mkdir('Contents')
        os.chdir('Contents')
        os.mkdir('MacOS')
        os.mkdir('Resources')
        infoPlist = open('Info.plist', 'w+')
        infoPlist.write(open(prefix + '/info.plist.xml').read().format(
            **data,
            **data['build'],
            version=version
        ))
        infoPlist.close()
        shutil.copy('./../../pyinstaller/' + data['name'], 'MacOS')
        os.chdir('./../../..')
        os.rename('./dist/' + data['name'], './dist/' + data['name'] + ".app")
    else:
        print('not generating mac .app bundle because on {} not mac.'.format(sys.platform))
    # ============== Generate Installer Package ===============================================
    print('\n\n\u001b[4m\u001b[1;36mGenerating Installer Package...\u001b[0m')
    # NOTE(review): `and False` deliberately disables the Windows installer
    # branch — presumably WiX support is unfinished. Confirm before enabling.
    if sys.platform.startswith('win') and doinstall and False:  # WINDOWS
        os.system('pip install distro')
        os.system('pip install git+https://github.com/x24git/wixpy')
        d = open(prefix + os.sep + 'wixpy.template.json').read().format(
            **data,
            version=version,
            icns=data['resources']['icon'],
            keywo=", ".join(data['keywords'])
        )
        open('wixpy.json', 'w+').write(d)
        print(d)
        print('STARTING WIXPY...')
        os.system('wix.py wixpy.json')
    elif sys.platform.startswith('darwin'):  # MAC
        try:
            import dmgbuild
        except:
            print('Installing dmgbuild')
            os.system('pip3 install dmgbuild')
            import dmgbuild
        # Only needed the import to verify availability; the CLI is used below.
        del dmgbuild
        if not os.path.isdir('build'):
            os.mkdir('build')
        open('build' + os.sep + 'settings.py', 'w+').write(open(prefix + os.sep + 'settings.py.template').read().format(
            **data,
            version=version,
            icns=data['resources']['icon'],
            keywo=", ".join(data['keywords'])
        ))
        os.system(f'cat build/settings.py;dmgbuild -s .{os.sep}build{os.sep}settings.py "{data["name"]} Installer" ./{data["name"]}.dmg')
    else:
        print(f'Installer creation not yet supported for {sys.platform}!')
    # ============== Generate Github Actions Workflow ===============================================
    print('\n\n\u001b[4m\u001b[1;36mGenerating GitHub Actions File...\u001b[0m')
    if doact:
        # Remember the first line of the previous workflow to detect changes.
        try:
            oldact = open('.github' + os.sep + 'workflows' + os.sep + 'sailboat.yml').read().split('\n')[0]
        except:
            oldact = "\n"
        try:
            f = open('.github' + os.sep + 'workflows' + os.sep + 'sailboat.yml', 'w+')
        except:
            os.system('mkdir -p .github' + os.sep + 'workflows' + os.sep)
            f = open('.github' + os.sep + 'workflows' + os.sep + 'sailboat.yml', 'w+')
        # Empty string enables a template line; '#' comments it out.
        newdata = open(prefix + os.sep + 'sailboat.yml.template').read().format(
            **data,
            mac="" if data['build']['mac'] else "#",
            windows="" if data['build']['windows'] else "#",
            win_ext=".exe" if data['build']['installer'] else ".exe",
            mac_ext=".dmg" if not data['build']['unix'] else "",
            u="" if data['build']['unix'] else "#"
        ).replace('\t', ' ')
        f.write(newdata)
        f.close()
        newact = open('.github' + os.sep + 'workflows' + os.sep + 'sailboat.yml').read().split('\n')[0]
        data['build']['actions_built_latest'] = newact != oldact
        print(oldact)
        print(newact)
    # ============== Post build ===============================================
    # buildscript only exists if a build_script was configured above.
    try:
        newdata = buildscript.post(version, data)
        if isinstance(newdata, dict):
            data = newdata
    except:
        pass
    # ============== Save VersionInfo ===============================================
    data['latest_build'] = version
    open('sailboat.toml', 'w+').write(toml.dumps(data))
    os.system('python .' + os.sep + 'setup.py develop')
def check():
    """Return a PostgreSql check configured for localhost, pinned at server 9.2.0."""
    instance = {'dbname': 'dbname', 'host': 'localhost', 'port': '5432', 'username': USER}
    pg_check = PostgreSql('postgres', {}, [instance])
    # Pin the detected server version so tests don't probe a live database.
    pg_check._version = VersionInfo(9, 2, 0)
    return pg_check
def version_bump(version: semver.VersionInfo):
    """
    Bump the version number

    Args:
        version (semver.VersionInfo): parsed value

    Returns:
        bumped (semver.VersionInfo): parsed value with bump
    """
    # Build the bump choices from (label, bumped-value) pairs.
    bump_options = [
        ('Patch', version.bump_patch()),
        ('Minor', version.bump_minor()),
        ('Major', version.bump_major()),
        ('Build', version.bump_build()),
        ('Prerelease', version.bump_prerelease()),
    ]
    choices = [
        {'name': '{}: {} ==> {}'.format(label, version, bumped), 'value': bumped}
        for label, bumped in bump_options
    ]
    choices += [
        Separator(),
        {'name': 'Manual type version', 'value': 'manual'},
        Separator(),
        {'name': 'Quit', 'value': 'quit'},
    ]
    # PyInquirer has a mouse-click problem (yes you read that correctly)
    # https://github.com/CITGuru/PyInquirer/issues/41
    # Until that's fixed we need to handle menus carefully
    response = None
    while response is None:
        answers = prompt([{
            'type': 'list',
            'name': 'verbump',
            'message': 'What bump do you want?',
            'choices': choices
        }])
        if 'verbump' in answers:
            response = answers['verbump']
    if isinstance(response, str) and response == 'manual':
        return get_manual()
    if isinstance(response, str) and response == 'quit':
        exit(0)
    return response
def run(self, **kwargs):
    """Run the `sail build` command: resolve the version, then execute each
    configured build plugin in dependency order, with progress UI.

    Reads self.options (CLI tokens), self.data (sailboat.toml contents),
    self.prefix; mutates self.data and rewrites project files on disk.
    """
    manager = enlighten.get_manager()
    term = blessed.Terminal()
    if len(self.options) > 0 and self.options[0] == 'help':
        print(
            "usage: sail build [version (or) increment] [plugins ...]\n\tThis command builds your project using the "
            "sailboat.toml file.\n\tValid options for version:\n\t\t- Valid semver.org string: set that as "
            "version\n\t\t- `major`: increments the major version by one.\n\t\t- `minor`: increments the minor "
            "version by one.\n\t\t- `patch`: increments the patch version by one.\n\t\t- `pre`: increments the "
            "prerelease version by one.\n\t\t- None: increments build version by one.")
        return
    # Get Version =====================================================================================
    if 'latest_build' not in self.data:
        self.data['latest_build'] = '0.0.1'
    if len(self.options) >= 1:  # Something provided
        if VersionInfo.isvalid(self.options[0]):
            # Explicit valid semver string — use it as-is.
            version = self.options[0]
        elif self.options[0].startswith('maj'):
            version = str(VersionInfo.parse(self.data['latest_build']).bump_major())
        elif self.options[0].startswith('min'):
            version = str(VersionInfo.parse(self.data['latest_build']).bump_minor())
        elif self.options[0].startswith('pat'):
            version = str(VersionInfo.parse(self.data['latest_build']).bump_patch())
        elif self.options[0].startswith('+') or self.options[0].startswith('build'):
            version = str(VersionInfo.parse(self.data['latest_build']).bump_build())
        else:
            print('Unknown version `{}`'.format(self.options[0]))
            return
        if '.pre' in self.options[0] or '.dev' in self.options[0]:
            version = str(VersionInfo.parse(self.data['latest_build']).bump_prerelease())
    else:
        # No version argument: derive a build tag from the current git commit.
        try:
            latestcommit = os.popen('git rev-parse --short HEAD').read().replace('\n', '')
        except KeyboardInterrupt:
            latestcommit = "build"
        if latestcommit in self.data['latest_build']:
            version = str(VersionInfo.parse(self.data['latest_build']).bump_build())
        else:
            version = str(VersionInfo.parse(self.data['latest_build']).replace(build=latestcommit + ".1"))
    # NOTE(review): self.options[0] is read unconditionally here — in the
    # no-options path above this raises IndexError. Confirm or guard.
    if compare(version,
               self.data['latest_build']) == -1 and not (
            self.options[0].startswith('pre') or self.options[0].startswith('dev')):
        if input(
                term.red + f'You are building a version ({version}) that comes before the previously built version ({self.data["latest_build"]}). Do you wish to continue? [y/n]' + term.normal)[
                0] == 'n' or ('-y' in self.options or '--no-interaction' in self.options):
            print()
            return
    status_format = '{program}{fill}{current}{fill}{version}'
    status_bar = manager.status_bar(status_format=status_format,
                                    color='white_on_blue',
                                    program=self.data['name'],
                                    current='building directory structure',
                                    version=version)
    print('\nPreparing to build version {}\n'.format(version))
    self.data['latest_build'] = version
    # No plugin names after the version → run every configured build plugin.
    if len(self.options[1:]) == 0:
        notdones = [*self.data['build'].keys()]
    else:
        notdones = self.options[1:]
    progress_bar = manager.counter(total=len(notdones) + 3, desc='Build', unit='jobs', color="grey")
    prebuild = progress_bar.add_subcounter('white')
    postbuild = progress_bar.add_subcounter('darkgrey')
    # =====================================================================================
    if not os.path.isfile('.gitignore'):
        open('.' + os.sep + '.gitignore', 'w+', encoding="utf8").write(
            self.getResource('resources' + os.sep + 'gitignore.template').read().replace('/', os.sep))
    source_dir = os.getcwd()
    target_dir = self.data["short_name"] + os.sep
    types = ('*.py', *self.data['resources']["data_files"])
    file_names = []
    for files in types:
        file_names.extend(glob.glob(files))
    if not os.path.isdir(target_dir):
        os.mkdir(target_dir)
    for file_name in file_names:
        if file_name in ("setup.py", "sailboat.toml", self.data['resources']['file']):
            continue
        shutil.move(os.path.join(source_dir, file_name), target_dir + os.sep + file_name)
    for filename in glob.glob(target_dir + os.sep + 'LICE*'):
        shutil.copyfile(filename, 'LICENSE')
    if not os.path.isfile(target_dir + '__init__.py'):
        open(target_dir + '__init__.py', 'w+', encoding="utf8").write('# This file must exist, empty or not')
    # Promote the configured entry file to the package's __main__.py.
    if self.data['resources']['file'] != "" and not os.path.isfile(
            self.data['short_name'] + os.sep + '__main__.py') and os.path.isfile(self.data['resources']['file']):
        try:
            os.rename(self.data['resources']['file'], self.data['short_name'] + os.sep + '__main__.py')
        except:
            pass
    time.sleep(0.1); status_bar.update(current="scanning imports")
    prebuild.update()
    # =====================================================================================
    print('Scanning module imports...')
    if 'no_import' not in self.data['resources']:
        self.data['resources']['no_import'] = []
    mods = []
    for x in glob.glob(self.data['short_name'] + os.sep + '*.py'):
        f = open(x, encoding="utf8")
        b = f.read()
        f.close()
        # Collect both `import x` and `from x import ...` module names.
        mods += re.findall('^import[ ]+(.*)', b, re.M)
        mods += re.findall('^from[ ]+(.*) import', b, re.M)
    modules = []
    for x in set(mods):
        modules.append(x.split('.')[0])
    for module in set(modules):
        if module not in self.data['resources']['no_import'] and (
                module != self.data['short_name'] and module not in sys.builtin_module_names and module not in
                self.data['resources']['modules']):
            # Unknown name: treat as a PyPI dependency iff PyPI knows it.
            print('Checking for {} on PyPi...'.format(module))
            response = requests.get("https://pypi.python.org/pypi/{}/json".format(module))
            if response.status_code == 200:
                self.data['resources']['modules'].append(module)
            else:
                self.data['resources']['no_import'].append(module)
    time.sleep(0.1); status_bar.update(current="removing previous builds")
    prebuild.update()
    # =====================================================================================
    try:
        shutil.rmtree('dist')
    except FileNotFoundError:
        pass
    dones = []
    for build_plugin in progress_bar(notdones):
        # Skip plugins explicitly disabled via _run unless named on the CLI.
        if build_plugin not in self.options and (
                '_run' in self.data['build'][build_plugin] and not self.data['build'][build_plugin]['_run']):
            continue
        if build_plugin in dones:
            continue
        if build_plugin not in self.data['build'].keys():
            continue
        elif '_needs' in self.data['build'][build_plugin]:
            # Run unmet dependencies first; requeue this plugin for later.
            if isinstance(self.data['build'][build_plugin]['_needs'], str):
                self.data['build'][build_plugin]['_needs'] = [self.data['build'][build_plugin]['_needs']]
            for x in self.data['build'][build_plugin]['_needs']:
                if x not in dones:
                    notdones.append(build_plugin)
                    build_plugin = x
        print(term.cyan + term.underline + build_plugin + term.normal + term.nounderline + "\n\n")
        time.sleep(0.2); status_bar.update(current=build_plugin)
        try:
            plugin_type, job = get_plugin(build_plugin, plugin_type="build")
            job = job(
                data=self.data,
                options=[],
                name=build_plugin,
                prefix=self.prefix,
                version=version
            )
        except PluginNotFound:
            sys.exit(f'You seem to have added the {build_plugin} plugin, but it does not appear to be installed!')
        try:
            job.run()
        except KeyboardInterrupt:
            print('\n\nUser has aborted at step {}.\n\n'.format(build_plugin))
            sys.exit(0)
        except BaseException as error:
            # NOTE(review): traceback.print_exc() returns None, so self.red()
            # receives None here — the formatted message loses the traceback.
            print('\n\nError at step {}:\n\n\t{}\n\n'.format(build_plugin, self.red(traceback.print_exc())))
            sys.exit(1)
        # Persist any config changes the plugin made for its own section.
        self.data[job._type][build_plugin] = job.data[job._type][build_plugin]
        dones.append(build_plugin)
    time.sleep(0.1); status_bar.update(current="running develop")
    print(self.section('Finishing up...'))
    os.system('python3 setup.py develop')
    postbuild.update()
    print(self.section('Built files:'))
    for x in glob.glob(f'.{os.sep}dist{os.sep}*{os.sep}*') +\
            glob.glob(f'.{os.sep}dist{os.sep}*'):
        print(x)
    time.sleep(0.2); status_bar.update(current='Finished Build!')
    manager.stop()
def init(metadata, env):
    """Select the context helper implementation matching the installed kedro version."""
    kedro_semver = VersionInfo.parse(kedro_version)
    # kedro >= 0.17.0 changed the context API; older releases use the 0.16 helper.
    if kedro_semver.match(">=0.17.0"):
        return ContextHelper(metadata, env)
    return ContextHelper16(metadata, env)
def stac_get_items(cls, provider_json, collection, bbox, datetime=None, cloud_cover=None, limit=300):
    """Query a STAC provider for items in `collection` filtered by bbox and
    optional datetime/cloud-cover, returning a post-processed FeatureCollection.

    provider_json: dict with at least 'url' and 'version' (STAC version string).
    bbox: bounding box as a "minx,miny,maxx,maxy" string or a list of numbers.
    Raises BadRequest if bbox is neither a string nor a list.
    On an HTTP error from the provider, returns an empty feature collection
    carrying the error instead of raising.
    """
    logging.info('CollectionsBusiness.stac_get_items\n')
    logging.info('CollectionsBusiness.stac_get_items - provider_json: %s', provider_json)
    logging.info('CollectionsBusiness.stac_get_items - collection: %s', collection)
    logging.info('CollectionsBusiness.stac_get_items - bbox: %s', bbox)
    logging.info('CollectionsBusiness.stac_get_items - datetime: %s', datetime)
    logging.info('CollectionsBusiness.stac_get_items - cloud_cover: %s', cloud_cover)
    logging.info('CollectionsBusiness.stac_get_items - limit: %s', limit)
    url = provider_json['url']
    logging.info('CollectionsBusiness.stac_get_items - url: %s', url)
    if isinstance(bbox, str):
        query = 'bbox={}'.format(bbox)
    elif isinstance(bbox, list):
        # Normalise a numeric list to the comma-separated string form.
        bbox = ",".join(list(map(str, bbox)))
        query = 'bbox={}'.format(bbox)
    else:
        raise BadRequest(
            '`bbox` field is invalid: `{0}`, it should be a string or list, but its type is {1}.'
            .format(bbox, type(bbox)))
    # if STAC <= '0.7.0'
    # Older STAC APIs call the temporal filter `time`; newer ones `datetime`.
    if datetime and (VersionInfo.parse(provider_json['version']) <= VersionInfo.parse('0.7.0')):
        query += '&time={}'.format(datetime)
    elif datetime:
        # default: STAC >= 0.9.0
        query += '&datetime={}'.format(datetime)
    if cloud_cover:
        # Filter to cloud cover in the range [0, cloud_cover].
        query += '&eo:cloud_cover=0/{}'.format(cloud_cover)
    query += '&limit={}'.format(limit)
    logging.info('CollectionsBusiness.stac_get_items - query: %s', query)
    try:
        response = StacComposeServices.get_collections_collection_id_items(
            url, collection, query)
        # logging.debug('CollectionsBusiness.stac_get_items - before post processing - response: %s', response)
        # post processing to rename fields and add field is it is necessary
        response = add_context_field_in_the_feature_collection_if_it_does_not_exist(
            response, page=1, limit=limit)
        response = rename_fields_from_feature_collection(response)
        # logging.debug('CollectionsBusiness.stac_get_items - after post processing - response: %s', response)
        return response
    except HTTPException as error:
        logging.debug(
            'CollectionsBusiness.stac_get_items - HTTPException.error: %s',
            error)
        return create_new_feature_collection(limit=limit, error=error)
def _get_pulumi_version(self) -> VersionInfo:
    """Return the semantic version reported by the `pulumi version` CLI command.

    The CLI conventionally prefixes its version with "v" (e.g. "v3.2.1");
    that prefix is stripped before parsing.

    Raises:
        ValueError: if the (stripped) output is not a valid semantic version.
    """
    result = self._run_pulumi_cmd_sync(["version"])
    version_string = result.stdout.strip()
    # startswith() also covers empty stdout, where the original
    # version_string[0] check raised IndexError instead of letting
    # VersionInfo.parse report a meaningful parse error.
    if version_string.startswith("v"):
        version_string = version_string[1:]
    return VersionInfo.parse(version_string)
def version():
    """Provide a fully-populated semantic version (with prerelease and build parts)."""
    return VersionInfo(
        major=1,
        minor=2,
        patch=3,
        prerelease='alpha.1.2',
        build='build.11.e0f985a',
    )
def extract_metadata(
    match: Match, tag: str, tagtype: RubinTagType
) -> Tuple[str, Optional[VersionInfo], Optional[int]]:
    """Return a display name, semantic version (optional), and cycle
    (optional) from match, tag, and type.

    The defaults are the tag itself as the name with no semver and no
    cycle; each recognised tag type then refines those.
    """
    md = match.groupdict()
    name = tag
    semver = None
    ctag = md.get("ctag")
    cycle = md.get("cycle")
    cbuild = md.get("cbuild")
    cycle_int = None
    rest = md.get("rest")
    # We have our defaults. The rest is optimistically seeing if we can
    # do better
    if tagtype == RubinTagType.UNKNOWN:
        # We can't do anything better, but we really shouldn't be
        # extracting from an unknown type.
        pass
    elif tagtype == RubinTagType.EXPERIMENTAL:
        # This one is slightly complicated. Because of the way the build
        # process works, our tag likely looks like exp_<other-legal-tag>.
        # So we try that hypothesis. If that's not how the tag is
        # constructed, name will just come back as everything
        # after "exp_".
        if rest is not None:
            # it actually never will be None if the regexp matched, but
            # mypy doesn't know that
            temp_ptag = RubinPartialTag.parse_tag(rest)
            # We only care about the display name, not any other fields.
            name = f"Experimental {temp_ptag.display_name}"
    else:
        # Everything else does get an actual semantic version
        build = RubinPartialTag.trailing_parts_to_semver_build_component(
            cycle, cbuild, ctag, rest
        )
        typename = RubinPartialTag.prettify_tag(tagtype.name)
        restname = name[2:]
        if (
            tagtype == RubinTagType.RELEASE
            or tagtype == RubinTagType.RELEASE_CANDIDATE
        ):
            # This is bulky because we don't want to raise an error here
            # if we cannot extract a required field; instead we let the
            # field be None, and then the semantic version construction
            # fails later. That's OK too, because we try that in a
            # try/except block and return None if we can't construct
            # a version. In *that* case we have a tag without semantic
            # version information--which is allowable.
            major = RubinPartialTag.maybe_int(md.get("major"))
            minor = RubinPartialTag.maybe_int(md.get("minor"))
            # NOTE(review): groupdict() maps a non-participating group to
            # None (the key IS present), so this "0" default only applies
            # when the pattern has no "patch" group at all — confirm that
            # is the intended case for "If omitted, it's zero".
            patch = RubinPartialTag.maybe_int(
                md.get("patch", "0")
            )  # If omitted, it's zero
            restname = f"r{major}.{minor}.{patch}"
            pre = md.get("pre")
            if pre:
                pre = f"rc{pre}"
                restname += f"-{pre}"
        else:
            # tagtype is weekly or daily
            year = md.get("year")
            month = md.get("month")
            week = md.get("week")
            day = md.get("day")
            major = RubinPartialTag.maybe_int(year)
            if tagtype == RubinTagType.WEEKLY:
                minor = RubinPartialTag.maybe_int(week)
                patch = 0
                restname = (
                    f"{year}_{week}"  # preserve initial string format
                )
            else:
                minor = RubinPartialTag.maybe_int(md.get("month"))
                patch = RubinPartialTag.maybe_int(md.get("day"))
                restname = (
                    f"{year}_{month}_{day}"  # preserve string format
                )
            pre = None
        try:
            semver = VersionInfo(
                major=major,
                minor=minor,
                patch=patch,
                prerelease=pre,
                build=build,
            )
        except TypeError as exc:
            # A missing component (None) surfaces here; the tag simply
            # carries no semantic version in that case.
            logger.warning(f"Could not make semver from tag {tag}: {exc}")
        name = f"{typename} {restname}"  # Glue together display name.
        if cycle:
            name += f" (SAL Cycle {cycle}, Build {cbuild})"
        if rest:
            name += f" [{rest}]"
        # NOTE(review): placed inside this branch per the surrounding
        # scope (typename/restname only exist here) — confirm cycle_int
        # was not meant to be computed for experimental tags too.
        cycle_int = RubinPartialTag.maybe_int(cycle)
    return (name, semver, cycle_int)
def create_tree_html(tree_object, selected_node, edge_type, folded_nodes, height=500):
    """Build a standalone HTML page rendering `tree_object` with the
    react-craft-ai-decision-tree component.

    :param tree_object: decision tree as a dict; must carry a `_version`
        in [1.0.0, 3.0.0), a `configuration` and `trees`.
    :param selected_node: node selector string (validated against
        SELECTED_NODE_REGEX).
    :param edge_type: one of "constant", "absolute", "relative".
    :param folded_nodes: optional list of node selector strings.
    :param height: strictly positive pixel height of the rendered tree.
    :returns: the formatted HTML document as a string.
    :raises CraftAiError: on any invalid argument or tree.
    """
    # NOTE(review): the third unpkg URL below looks like an extraction
    # artifact ("[email protected]"); the unused `version=` argument passed to
    # .format() at the bottom strongly suggests the original URL embedded
    # react-craft-ai-decision-tree@{version} — verify against upstream.
    html_template = """<html>
    <head>
      <script src="https://unpkg.com/react@16/umd/react.development.js" crossorigin defer>
      </script>
      <script src="https://unpkg.com/react-dom@16/umd/react-dom.development.js" crossorigin defer>
      </script>
      <script src="https://unpkg.com/[email protected]" crossorigin defer>
      </script>
      <style>
        .jp-RenderedHTMLCommon table {{
          table-layout: inherit;
        }}
        .jp-RenderedHTMLCommon ul {{
          padding-left: none;
        }}
      </style>
    </head>
    <body>
      <div id="{idDiv}">
      </div>
      <script async=false>
        ReactDOM.render(
          React.createElement(DecisionTree, {{
            style: {{ height: {height} }},
            data: {tree},
            selectedNode: "{selectedNode}",
            foldedNodes: {foldedNodes},
            edgeType: "{edgeType}"
          }}
          ),document.getElementById("{idDiv}")
        );
      </script>
    </body>
    </html>"""
    if height <= 0:
        raise CraftAiError("A strictly positive height value must be given.")
    # Checking definition of tree_object
    if not isinstance(tree_object, dict):
        raise CraftAiError(
            "Invalid decision tree format, the given json is not an object.")
    # Checking version existence
    tree_version = tree_object.get("_version")
    if not tree_version:
        raise CraftAiError(
            """Invalid decision tree format, unable to find the version"""
            """ informations.""")
    # Checking version and tree validity according to version
    if re.compile(r"\d+.\d+.\d+").match(tree_version) is None:
        raise CraftAiError(
            """Invalid decision tree format, "{}" is not a valid version.""".
            format(tree_version))
    # NOTE(review): str-vs-VersionInfo comparison — this relies on semver's
    # reflected comparison accepting a plain string operand; confirm the
    # pinned semver version supports it.
    elif tree_version >= VersionInfo(1, 0, 0) and tree_version < VersionInfo(
            3, 0, 0):
        if tree_object.get("configuration") is None:
            raise CraftAiError(
                """Invalid decision tree format, no configuration found""")
        if tree_object.get("trees") is None:
            raise CraftAiError(
                """Invalid decision tree format, no tree found.""")
    else:
        raise CraftAiError(
            """Invalid decision tree format, {} is not a supported"""
            """ version.""".format(tree_version))
    if folded_nodes is None:
        folded_nodes = []
    elif not isinstance(folded_nodes, list):
        raise CraftAiError(
            """Invalid folded nodes format given, it should be an array, found: {}"""
            .format(folded_nodes))
    else:
        for folded_node in folded_nodes:
            # NOTE(review): with `and`, a value that IS a string skips the
            # regex check entirely, and a non-string reaches re.match and
            # raises TypeError — the condition (and the "tt" typo in the
            # message) looks wrong; verify intended validation. Same
            # pattern for selected_node below.
            if not isinstance(folded_node, str) and not re.compile(
                    SELECTED_NODE_REGEX).match(folded_node):
                raise CraftAiError(
                    """Invalid folded node format given, tt should be a"""
                    """String following this regex: {}, found: {}""".format(
                        SELECTED_NODE_REGEX, folded_nodes))
    if edge_type not in ["constant", "absolute", "relative"]:
        raise CraftAiError(
            """Invalid edge type given, its value should be a "constant", """
            """"absolute" or "relative", found: {}""".format(edge_type))
    if not isinstance(
            selected_node,
            str) and not re.compile(SELECTED_NODE_REGEX).match(selected_node):
        raise CraftAiError(
            """Invalid selected node format given, tt should be a"""
            """String following this regex: {}, found: {}""".format(
                SELECTED_NODE_REGEX, selected_node))
    return html_template.format(
        height=height,
        tree=json.dumps(tree_object),
        version=REACT_CRAFT_AI_DECISION_TREE_VERSION,
        selectedNode=selected_node,
        foldedNodes=folded_nodes,
        edgeType=edge_type,
        idDiv=random_string(),
    )
# (C) Datadog, Inc. 2019-present # All rights reserved # Licensed under Simplified BSD License (see LICENSE) import re import semver from semver import VersionInfo from datadog_checks.base.log import get_check_logger V8_3 = VersionInfo(**semver.parse("8.3.0")) V9 = VersionInfo(**semver.parse("9.0.0")) V9_1 = VersionInfo(**semver.parse("9.1.0")) V9_2 = VersionInfo(**semver.parse("9.2.0")) V9_4 = VersionInfo(**semver.parse("9.4.0")) V9_6 = VersionInfo(**semver.parse("9.6.0")) V10 = VersionInfo(**semver.parse("10.0.0")) class VersionUtils(object): def __init__(self): self.log = get_check_logger() @staticmethod def get_raw_version(db): cursor = db.cursor() cursor.execute('SHOW SERVER_VERSION;') raw_version = cursor.fetchone()[0] return raw_version def is_aurora(self, db):
# Copyright 2016-2021, Pulumi Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from semver import VersionInfo _MINIMUM_VERSION = VersionInfo.parse("2.25.0-alpha")
# Determine the commit range and last released version, then compute the
# next version from the strongest bump keyword found in the commits.
diff = repo.compare(last_release_tag, last_commit.sha).commits
# A leading "v" on the tag is remembered so the new tag can match it.
if last_release_tag[0] == "v":
    USE_PREFIX = True
    last_release_tag = last_release_tag[1:]
else:
    USE_PREFIX = False
try:
    last_version, _ = coerce(last_release_tag)
except ValueError:
    logging.error("Tag name of last release is not in semver format")
    sys.exit(1)
else:
    # NOTE(review): as written, this `else` runs on the try's SUCCESS path
    # and immediately overwrites last_version from coerce() with 0.0.0 and
    # rescans from the first commit. It almost certainly belongs to an
    # outer `if` ("does a previous release exist?") that is not visible in
    # this view — verify against the full file before touching.
    USE_PREFIX = False
    last_version = VersionInfo.parse('0.0.0')
    first_commit = repo.get_commits().reversed[0]
    diff = repo.compare(first_commit.commit.sha, last_commit.sha).commits
# Strongest keyword wins: 3=major, 2=minor, 1=patch, 0=no bump.
bumping_strength = max(keyword_detection)
new_version = last_version
if bumping_strength == 3:
    new_version = last_version.bump_major()
if bumping_strength == 2:
    new_version = last_version.bump_minor()
if bumping_strength == 1:
    new_version = last_version.bump_patch()
def run(self, **kwargs):
    """Build distributable binaries with PyInstaller and, on macOS, also
    assemble a .app bundle and a .dmg installer.

    Everything is driven by self.data / self.getData; artifacts land in
    ./dist/pyinstaller.
    """
    version = VersionInfo.parse(self.version)
    # Re-serialise without the build-metadata component (prerelease kept).
    self.version = str(
        VersionInfo(major=version.major,
                    minor=version.minor,
                    patch=version.patch,
                    prerelease=version.prerelease))
    # Nothing to do unless at least one target platform is enabled.
    if not (self.getData('mac') or self.getData('windows')
            or self.getData('unix')):
        return
    try:
        import PyInstaller.__main__
    except:  # NOTE(review): bare except hides everything incl. Ctrl-C
        print('Installing PyInstaller...')
        os.system('pip3 install pyinstaller')
        import PyInstaller.__main__
    # --hidden-import flag pair for every declared module
    mods = []
    for x in self.data['resources']['modules']:
        mods.append('--hidden-import')
        mods.append(x)
    # --add-data flag pairs: each glob match mapped to its in-bundle dir
    datafiles = []
    for x in self.data['resources']['data_files']:
        for g in glob.glob(self.data['short_name'] + os.sep + x):
            datafiles.append('--add-data')
            datafiles.append(g + os.pathsep + "." + os.sep + os.sep.join(
                g.replace(self.data['short_name'] + os.sep,
                          '').split(os.sep)[:-1]))
    typ = '--nowindowed' if self.getData('type') == 1 else '--noconsole'
    if sys.platform.startswith('dar') and 'icns' in self.data['resources']:
        ico = ["--icon", self.data['resources']['icns']]
    # NOTE(review): on macOS the assignment above is clobbered by the
    # `else` branch below (the win check is False there), so the icns icon
    # never reaches PyInstaller — this first `if` probably wants to be
    # part of one if/elif/else chain.
    if sys.platform.startswith('win') and 'ico' in self.data['resources']:
        ico = ["--icon", self.data['resources']['ico']]
    else:
        ico = []
    osname = "macos" if sys.platform.startswith('darwin') else sys.platform
    options = [
        self.data['short_name'] + os.sep + '__main__.py',
        '--onefile',
        '--name',
        self.data['short_name'] + '-' + self.version + '-' + osname,
        '--distpath',
        '.' + os.sep + 'dist' + os.sep + 'pyinstaller',
        # NOTE(review): `self.data2` — every other lookup here uses
        # self.data; verify this attribute actually exists.
        *(self.getData('options') if 'options' in self.data2 else []),
        *mods,
        *datafiles,
        typ,
        *ico,
        '--osx-bundle-identifier',
        self.getData('bundle_id')
    ]
    print(options)
    PyInstaller.__main__.run(options)
    # PyInstaller leaves a .spec file behind; try both naming variants.
    try:
        print('removing ' + self.data['name'] + ".spec...")
        os.remove(self.data['name'] + ".spec")
    except:
        print('removing app.spec...')
        try:
            os.remove("app.spec")
        except:
            pass
    # MAC APP BUNDLE==============
    print('\n\n\u001b[4m\u001b[1;36mGenerating Mac .app bundle...\u001b[0m')
    if (self.getData('mac') and sys.platform.startswith('dar')):
        # Hand-build the Contents/{MacOS,Resources} layout macOS expects.
        os.chdir('dist')
        os.mkdir(self.data['name'])
        os.chdir(self.data['name'])
        os.mkdir('Contents')
        os.chdir('Contents')
        os.mkdir('MacOS')
        os.mkdir('Resources')
        infoPlist = open('Info.plist', 'w+')
        infoPlist.write(
            self.getResource('resources' + os.sep + 'info.plist.xml').read(
            ).format(**self.data,
                     execname=self.data['name'].replace('_', ''),
                     bundleicon="" if not "icns" in self.data['resources']
                     else self.data['resources']['icns'].split(os.sep)[-1],
                     **self.data['build'],
                     version=self.version,
                     bundle_id=self.getData('bundle_id')))
        infoPlist.close()
        # Move the freshly built binary into the bundle.
        os.rename(
            './../../pyinstaller/' + self.data["short_name"] + "-" +
            self.version + "-macos",
            'MacOS/' + self.data['name'].replace('_', ''))
        if "icns" in self.data['resources']:
            shutil.copy(
                './../../../' +
                ("" if not "icns" in self.data['resources'] else
                 self.data['resources']["icns"]), 'Resources/icon.icns')
        os.chdir('./../../..')
        os.rename('./dist/' + self.data['name'],
                  './dist/pyinstaller/' + self.data['name'] + ".app")
    else:
        print('not generating mac .app bundle because on {} not mac.'.format(
            sys.platform))
    # ============== Generate Installer Package ===============================================
    print(
        '\n\n\u001b[4m\u001b[1;36mGenerating Installer Package...\u001b[0m')
    if not (self.getData('windows') or self.getData('unix')
            or self.getData('mac')):
        pass
    elif sys.platform.startswith('darwin'):
        #MAC
        try:
            import dmgbuild
        except:
            print('Installing dmgbuild')
            os.system('pip3 install dmgbuild')
            import dmgbuild
        # Only needed on the PATH for the shell call below.
        del dmgbuild
        if not os.path.isdir('build'):
            os.mkdir('build')
        open('build' + os.sep + 'settings.py', 'w+').write(
            self.getResource('resources' + os.sep +
                             'settings.py.template').read().
            format(**self.data,
                   version=self.version,
                   icns="" if not "icns" in self.data['resources'] else
                   self.data['resources']['icns'],
                   keywo=", ".join(self.data['keywords']),
                   ddimage=self.prefix + 'resources' + os.sep +
                   'dragdrop.png'))
        os.system(
            f'cat build/settings.py;dmgbuild -s .{os.sep}build{os.sep}settings.py "{self.data["name"]} Installer" ./dist/pyinstaller/{self.data["short_name"]+"-"+self.version+"-macos"}.dmg'
        )
    else:
        print(f'Installer creation not yet supported for {sys.platform}!')
import click import kedro import requests from flask import Flask, abort, jsonify, send_from_directory from IPython.core.display import HTML, display from kedro.framework.cli.utils import KedroCliError from kedro.framework.context import KedroContextError, load_context from kedro.io import AbstractDataSet, DataCatalog, DataSetNotFoundError from kedro.pipeline.node import Node from semver import VersionInfo from toposort import toposort_flatten from kedro_viz.utils import wait_for KEDRO_VERSION = VersionInfo.parse(kedro.__version__) _VIZ_PROCESSES = {} # type: Dict[int, multiprocessing.Process] _DEFAULT_KEY = "__default__" _DATA = None # type: Dict _CATALOG = None # type: DataCatalog _JSON_NODES = {} # type: Dict[str, Dict[str, Union[Node, AbstractDataSet, Dict, None]]] app = Flask( # pylint: disable=invalid-name __name__, static_folder=str(Path(__file__).parent.absolute() / "html" / "static") ) ERROR_PROJECT_ROOT = ( "Could not find a Kedro project root. You can run `kedro viz` by either providing "
if __name__ == '__main__':
    # Release helper: validate the repo state, read the current version
    # from version.pri, and stamp a new section into CHANGELOG.md.
    parser = ArgumentParser()
    parser.add_argument('--major', type=int, required=True)
    parser.add_argument('--minor', type=int, required=True)
    parser.add_argument('--patch', type=int, required=True)
    parser.add_argument('--sign', default=False, action='store_true')
    parser.add_argument('--release_notes', type=str)
    args = parser.parse_args()

    # Refuse to release from anywhere but a clean master checkout.
    # NOTE(review): asserts are stripped under `python -O`; raising would
    # be safer for these guards.
    repo = Repo('.')
    assert str(repo.active_branch) == 'master'
    assert not repo.is_dirty()

    next_version = VersionInfo(args.major, args.minor, args.patch)

    # version.pri holds three "NAME = value" lines: major, minor, patch.
    with open('version.pri') as file:
        major = int(next(file).split(' = ')[1])
        minor = int(next(file).split(' = ')[1])
        patch = int(next(file).split(' = ')[1])
    current_version = VersionInfo(major, minor, patch)
    assert current_version < next_version

    # Turn the Unreleased section into a dated release heading (and start
    # a fresh Unreleased skeleton above it).
    # NOTE(review): the new heading is stamped with current_version, not
    # next_version — confirm which version this changelog entry is meant
    # to record.
    with open('CHANGELOG.md') as file:
        changelog = file.read().replace(
            '## [Unreleased]',
            '## [Unreleased]\n### Added\n\n### Changed\n\n### Fixed\n\n## [{}] - {}'
            .format(current_version, date.today().strftime('%Y-%m-%d')))
from materialize.git import get_version_tags
from materialize.mzcompose import (
    Kafka,
    Materialized,
    Postgres,
    SchemaRegistry,
    Testdrive,
    Workflow,
    Zookeeper,
)

#
# Determine the list of versions to be tested
#

# Lower bound of the upgrade matrix; override via MIN_TESTED_TAG.
min_tested_tag = VersionInfo.parse(os.getenv("MIN_TESTED_TAG", "0.8.0"))

# Only final releases participate — prerelease tags are skipped.
all_tags = [tag for tag in get_version_tags(fetch=False) if tag.prerelease is None]

if not all_tags:
    raise error.MzRuntimeError(
        "No tags found in current repository. Please run git fetch --all to obtain tags."
    )

all_tested_tags = sorted([tag for tag in all_tags if tag >= min_tested_tag])

# The Mz options that are valid only at or above a certain version
mz_options = {VersionInfo.parse("0.9.2"): "--persistent-user-tables"}

#
def version():
    """Fixture returning 1.2.3-alpha.1.2+build.11.e0f985a with every
    component populated."""
    parts = {
        "major": 1,
        "minor": 2,
        "patch": 3,
        "prerelease": "alpha.1.2",
        "build": "build.11.e0f985a",
    }
    return VersionInfo(**parts)
def version_compare(
    compare_ver,
    min_version,
    max_version,
    min_affected_version_excluding=None,
    max_affected_version_excluding=None,
):
    """Function to check if the given version is between min and max version

    >>> utils.version_compare("3.0.0", "2.0.0", "2.7.9.4")
    False

    >>> utils.version_compare("2.0.0", "2.0.0", "2.7.9.4")
    True

    >>> utils.version_compare("4.0.0", "2.0.0", "*")
    True
    """
    # Semver compatible and including versions provided
    is_min_exclude = False
    is_max_exclude = False
    # Fall back to the exclusive bounds when the inclusive ones are absent
    # (or wildcards) and remember that the comparison must then be strict.
    if (not min_version or min_version == "*") and min_affected_version_excluding:
        min_version = min_affected_version_excluding
        is_min_exclude = True
    if (not max_version or max_version == "*") and max_affected_version_excluding:
        max_version = max_affected_version_excluding
        is_max_exclude = True
    if not min_version:
        min_version = "0"
    # Perform semver match once we have all the required versions
    if compare_ver and min_version and max_version:
        if semver_compatible(compare_ver, min_version, max_version):
            min_value = VersionInfo.parse(compare_ver).compare(min_version)
            max_value = VersionInfo.parse(compare_ver).compare(max_version)
            min_check = min_value > 0 if is_min_exclude else min_value >= 0
            max_check = max_value < 0 if is_max_exclude else max_value <= 0
            return min_check and max_check
    # We have an incompatible semver string. Try to convert to semver format
    compare_semver, comprest = convert_to_semver(compare_ver)
    min_semver, minrest = convert_to_semver(
        "0.0.0" if min_version == "*" else min_version
    )
    max_semver, maxrest = convert_to_semver(max_version)
    # Only trust the converted values when nothing was left over.
    if (
        compare_semver
        and min_semver
        and max_semver
        and not comprest
        and not minrest
        and not maxrest
    ):
        min_value = compare_semver.compare(min_semver)
        max_value = compare_semver.compare(max_semver)
        # If we are confident about the versions post upgrade then return True
        min_check = min_value > 0 if is_min_exclude else min_value >= 0
        max_check = max_value < 0 if is_max_exclude else max_value <= 0
        return min_check and max_check
    compare_ver_build = None
    min_version_build = None
    max_version_build = None
    # Extract any build string such as alpha or beta
    if "-" in compare_ver and compare_ver != "-":
        tmpA = compare_ver.split("-")
        compare_ver = tmpA[0]
        compare_ver_build = tmpA[1]
    if "-" in min_version and min_version != "-":
        tmpA = min_version.split("-")
        min_version = tmpA[0]
        min_version_build = tmpA[1]
    if not max_version or max_version == "-":
        max_version = "0"
    if "-" in max_version and max_version != "-":
        tmpA = max_version.split("-")
        max_version = tmpA[0]
        max_version_build = tmpA[1]
    # Wildcards and placeholders collapse to trivially-true / zero values.
    if max_version == "*":
        return True
    if not min_version or min_version == "*" or min_version == "-":
        min_version = "0"
    if compare_ver == "-" or compare_ver == "*":
        compare_ver = "0"
    # Simple case
    if not compare_ver_build and not min_version_build and not max_version_build:
        if (compare_ver == min_version and not is_min_exclude) or (
            compare_ver == max_version and not is_max_exclude
        ):
            return True
    compare_ver_parts = str(compare_ver).split(".")
    min_version_parts = str(min_version).split(".")
    max_version_parts = str(max_version).split(".")
    # Pad all three versions to the widest component count seen.
    normal_ver_len = version_len(compare_ver)
    if version_len(min_version) > normal_ver_len:
        normal_ver_len = version_len(min_version)
    if version_len(max_version) > normal_ver_len:
        normal_ver_len = version_len(max_version)
    compare_ver_num = normalise_num(compare_ver, normal_ver_len)
    min_version_num = normalise_num(min_version, normal_ver_len)
    max_version_num = normalise_num(max_version, normal_ver_len)
    # If all versions follow proper versioning then perform a simple numerical comparison
    if (
        len(compare_ver_parts) == len(min_version_parts)
        and len(compare_ver_parts) == len(max_version_parts)
        and len(str(compare_ver_num)) == len(str(min_version_num))
        and len(str(compare_ver_num)) == len(str(max_version_num))
    ):
        if compare_ver_num >= min_version_num and compare_ver_num <= max_version_num:
            # Inside the numeric range: the build/prerelease suffixes decide.
            if (compare_ver_build == min_version_build and not is_min_exclude) or (
                compare_ver_build == max_version_build and not is_max_exclude
            ):
                return True
            if not compare_ver_build and (min_version_build or max_version_build):
                if (
                    compare_ver_num == min_version_num
                    and compare_ver_num == max_version_num
                ):
                    return False
                if max_version_build and compare_ver_num == max_version_num:
                    return False
                else:
                    return True
            return True
    # Fall back to a per-component comparison on zero-padded strings.
    normal_len = len(compare_ver_parts)
    if len(min_version_parts) > normal_len:
        normal_len = len(min_version_parts)
    if len(max_version_parts) > normal_len:
        normal_len = len(max_version_parts)
    # Normalise the version numbers to be of same length
    compare_ver = normalise_version_str(compare_ver, normal_len)
    min_version = normalise_version_str(min_version, normal_len)
    max_version = normalise_version_str(max_version, normal_len)
    compare_ver_parts = str(compare_ver).split(".")
    min_version_parts = str(min_version).split(".")
    max_version_parts = str(max_version).split(".")
    for i in range(0, normal_len):
        if (
            not compare_ver_parts[i].isdigit()
            or not min_version_parts[i].isdigit()
            or not max_version_parts[i].isdigit()
        ):
            # Non-numeric components must match both bounds exactly.
            if (
                compare_ver_parts[i] == min_version_parts[i]
                and compare_ver_parts[i] == max_version_parts[i]
            ):
                continue
            else:
                return False
        elif int(compare_ver_parts[i]) >= int(min_version_parts[i]) and int(
            compare_ver_parts[i]
        ) <= int(max_version_parts[i]):
            continue
        elif int(compare_ver_parts[i]) < int(min_version_parts[i]) or int(
            compare_ver_parts[i]
        ) > int(max_version_parts[i]):
            # An out-of-range component can still be acceptable when a
            # more significant component already bounded the version.
            if i == 0:
                return False
            if i == 1 and int(compare_ver_parts[i - 1]) <= int(
                max_version_parts[i - 1]
            ):
                return False
            if i >= 2 and int(compare_ver_parts[i - 1]) == int(
                max_version_parts[i - 1]
            ):
                return False
    # This will remove false positives where the comparison version is an excluded version
    if (is_min_exclude and compare_ver == min_version) or (
        is_max_exclude and compare_ver == max_version
    ):
        return False
    return True
def test_parse_method_for_version_info():
    """Parsing a full semver string and stringifying it round-trips."""
    raw = "1.2.3-alpha.1.2+build.11.e0f985a"
    assert str(VersionInfo.parse(raw)) == raw
def run(self, **kwargs):
    """Interactively release the latest build through every configured
    release-capable plugin, in plugin-declared order.

    Reads plugin configuration from self.data / self.options; records
    the released version in self.data['latest_release'].
    """
    plugins = refresh_plugins()
    self.data['release-notes'] = input('Release Title: ')
    # print('Release description: (^c to exit)')
    # try:
    #     while True:
    #         "\n" + self.data['release-notes'] += input('> ')
    # except KeyboardInterrupt:
    #     print('\n')
    # Re-serialise the latest build's version without build metadata.
    version = VersionInfo.parse(self.data['latest_build'])
    version = str(
        VersionInfo(major=version.major,
                    minor=version.minor,
                    patch=version.patch,
                    prerelease=version.prerelease))
    # Collect plugins to run: name -> declared run order.
    runs = {}
    if self.options == []:
        # No explicit selection: take every release-capable plugin.
        for x in self.data['release']:
            if x in plugins['release']:
                runs[x] = plugins['release'][x]['order']
        for x in self.data['build']:
            if x in plugins['build']:
                if plugins['build'][x]['release']:
                    runs[x] = plugins['build'][x]['order']
        for x in self.data:
            if x in plugins['core']:
                if plugins['core'][x]['release']:
                    runs[x] = plugins['core'][x]['order']
    else:
        # Only the explicitly requested plugins.
        for x in self.options:
            if x in self.data['release'] and x in plugins['release']:
                runs[x] = plugins['release'][x]['order']
            elif x in self.data['build']:
                if plugins['build'][x]['release']:
                    runs[x] = plugins['build'][x]['order']
            elif x in self.data:
                if plugins['core'][x]['release']:
                    runs[x] = plugins['core'][x]['order']
            else:
                print('sailboat: error: {} is not a valid release plugin.'.format(x))
                return
    # Sort by order via "order::name" strings.
    # NOTE(review): this sort is lexicographic — order 10 sorts before
    # order 2; confirm orders are meant to stay single-digit.
    runstemp = []
    for x in runs:
        runstemp.append(f"{runs[x]}::{x}")
    runstemp.sort()
    runs = []
    for x in runstemp:
        order, name = x.split('::')
        runs.append(name)
    input(f'Press enter to release version {version} the following ways:\n\t- ' +
          '\n\t- '.join(runs) + '\n\n>>>')
    dones = []
    for release_plugin in runs:
        if release_plugin in dones:
            continue
        print(self.section(release_plugin + ":"))
        plugin_type, temp = get_plugin(release_plugin)
        # NOTE(review): options=[self.options] wraps the options list in
        # another list — verify the plugin API expects that shape.
        temp = temp(
            data=self.data,
            options=[self.options],
            name=release_plugin,
            prefix=self.prefix,
            version=version
        )
        temp.release()
        if temp._type == 'core':
            # NOTE(review): `self.dat` looks like a typo for `self.data` —
            # as written the core plugin's data changes are dropped.
            self.dat = temp.data
        else:
            self.data[temp._type][release_plugin] = temp.data[temp._type][release_plugin]
        dones.append(release_plugin)
        print()
    self.data['latest_release'] = version
def test_should_return_versioninfo_with_replaced_parts(version, parts, expected):
    """replace() on a parsed version equals parsing the expected string."""
    replaced = VersionInfo.parse(version).replace(**parts)
    assert replaced == VersionInfo.parse(expected)
def test_parse_method_for_version_info():
    """str(VersionInfo.parse(x)) is the identity on a valid semver string."""
    original = "1.2.3-alpha.1.2+build.11.e0f985a"
    parsed = VersionInfo.parse(original)
    round_tripped = str(parsed)
    assert round_tripped == original