예제 #1
0
def semver_compatible(compare_ver, min_version, max_version):
    """Return True only when every supplied version string is valid SemVer."""
    candidates = (compare_ver, min_version, max_version)
    return all(VersionInfo.isvalid(candidate) for candidate in candidates)
예제 #2
0
    def get_from_python(self, lookup: str, key=None):
        """Retrieves the version of a Python package by importing the package
        and printing the ``__version__`` value.

        Args:
            lookup: Importable module name whose ``__version__`` is queried.
            key: Key under which the result is stored in ``self.contdata``;
                defaults to ``lookup``.

        Returns `None` if the package version cannot be printed.
        """
        if key is None:
            key = lookup
        # Record the error sentinel up front so a failure below always
        # leaves a well-defined value behind.
        self.contdata[key] = self.ERROR
        # Pass the command as an argument list with shell=False: `lookup` is
        # interpolated into Python source, and avoiding the shell removes the
        # shell-injection vector the old shell=True string form had.
        p = subprocess.run(
            ["python", "-c", "import {} as x; print(x.__version__);".format(lookup)],
            shell=False,
            capture_output=True,
            check=False,
        )
        result = p.stdout.decode("utf-8")
        if p.returncode != 0:
            logger.info(
                "Failed to get '%s' package version from python: %s",
                lookup,
                result,
            )
            return
        # The import may emit extra output; keep the first line that parses
        # as a valid semantic version.
        for line in result.split("\n"):
            if VersionInfo.isvalid(line):
                result = line
                break
        self.contdata[key] = result.strip()
예제 #3
0
def isValidSemver(version):
    """Semantic version number - determines whether the version is qualified.
    The format is MAJOR.Minor.PATCH, more with https://semver.org
    """
    # Guard clause: only non-empty strings can be valid versions.
    if not version or not isinstance(version, string_types):
        return False
    return VersionInfo.isvalid(version)
예제 #4
0
def update(ctx, all):  # pragma: no cover
    """
    Checks for out-of-date modules on the connected CIRCUITPYTHON device, and
    prompts the user to confirm updating such modules.

    :param ctx: Click context; ``ctx.obj["DEVICE_PATH"]`` locates the device.
    :param all: When truthy, update every module without prompting.
    """
    logger.info("Update")
    # Grab out of date modules.
    modules = [
        m for m in find_modules(ctx.obj["DEVICE_PATH"], get_bundles_list())
        if m.outofdate
    ]
    if modules:
        click.echo("Found {} module[s] needing update.".format(len(modules)))
        if not all:
            click.echo("Please indicate which modules you wish to update:\n")
        for module in modules:
            update_flag = all
            if VERBOSE:
                click.echo("Device version: {}, Bundle version: {}".format(
                    module.device_version, module.bundle_version))
            # A bundle version that is a string but not valid SemVer means the
            # library's __version__ metadata is broken; warn but keep going.
            if isinstance(
                    module.bundle_version,
                    str) and not VersionInfo.isvalid(module.bundle_version):
                click.secho(
                    f"WARNING: Library {module.name} repo has incorrect __version__"
                    "\n\tmetadata. Circup will assume it needs updating."
                    "\n\tPlease file an issue in the library repo.",
                    fg="yellow",
                )
                if module.repo:
                    click.secho(f"\t{module.repo}", fg="yellow")
            # Interactive mode: explain why the update matters, then confirm.
            if not update_flag:
                if module.mpy_mismatch:
                    click.secho(
                        f"WARNING: '{module.name}': mpy format doesn't match the"
                        " device's Circuitpython version. Updating is required.",
                        fg="yellow",
                    )
                    update_flag = click.confirm("Do you want to update?")
                elif module.major_update:
                    update_flag = click.confirm((
                        "'{}' is a Major Version update and may contain breaking "
                        "changes. Do you want to update?".format(module.name)))
                else:
                    update_flag = click.confirm("Update '{}'?".format(
                        module.name))
            if update_flag:
                # pylint: disable=broad-except
                try:
                    module.update()
                    click.echo("Updated {}".format(module.name))
                except Exception as ex:
                    # Full traceback goes to the log; CLI gets a short message.
                    logger.exception(ex)
                    click.echo(
                        "Something went wrong, {} (check the logs)".format(
                            str(ex)))
                # pylint: enable=broad-except
        return
    click.echo("None of the modules found on the device need an update.")
예제 #5
0
 def test_validate_semver(self):
     """Check that ``__version__`` is a valid SemVer string matching the
     version recorded in the project metadata."""
     # It follows strictly the 2.0.0 version of the SemVer scheme.
     # For more information: https://semver.org/spec/v2.0.0.html
     assert isinstance(__version__, str)
     assert VersionInfo.isvalid(__version__), (
         "Invalid semantic-version."
         "For more information: https://semver.org/spec/v2.0.0.html")
     assert __version__ == PYCLN_METADATA["version"]
예제 #6
0
class TestMetadata:
    """`__init__.py`/`pyproject.toml` test case."""
    def test_pyproject_path(self):
        """The pyproject path constant points at an existing pyproject.toml."""
        assert PYPROJECT_PATH.is_file()
        assert str(PYPROJECT_PATH).endswith("pyproject.toml")

    def test_doc(self):
        """The package docstring matches the metadata description."""
        assert isinstance(__doc__, str)
        assert len(__doc__) > 30, "Too short description!"
        assert len(__doc__) <= 100, "Too long description!"
        assert __doc__ == PYCLN_METADATA["description"]

    def test_name(self):
        """The package name matches the metadata name."""
        # NOTE(review): assumes __name__ is imported from the package, not
        # this test module's own __name__ — confirm against the import list.
        assert isinstance(__name__, str)
        assert __name__ == PYCLN_METADATA["name"] == "pycln"

    def test_validate_semver(self):
        """``__version__`` is valid SemVer and matches the metadata version."""
        # It follows strictly the 2.0.0 version of the SemVer scheme.
        # For more information: https://semver.org/spec/v2.0.0.html
        assert isinstance(__version__, str)
        assert VersionInfo.isvalid(__version__), (
            "Invalid semantic-version."
            "For more information: https://semver.org/spec/v2.0.0.html")
        assert __version__ == PYCLN_METADATA["version"]

    @pytest.mark.skipif(
        getenv("CI", "false") != "true"
        or not VersionInfo.isvalid(__version__),
        reason="Invalid semantic-version.",
    )
    def test_compare_semver(self):
        """The current version is not behind the latest PyPI release."""
        # It follows strictly the 2.0.0 version of the SemVer scheme.
        # For more information: https://semver.org/spec/v2.0.0.html
        pycln_json = requests.get(PYCLN_PYPI_JSON_URL, timeout=5)
        latest_version = pycln_json.json()["info"]["version"]
        current_version = VersionInfo.parse(__version__)
        assert current_version.compare(latest_version) > -1, (
            f"Current version ({current_version}) can't be less than the "
            f"latest released ({PYCLN_PYPI_URL}) version ({latest_version})!")

    @pytest.mark.parametrize(
        "value, expec_err, expec_exit_code",
        [
            pytest.param(True, Exit, 0, id="value=True"),
            pytest.param(False, sysu.Pass, None, id="value=False"),
        ],
    )
    def test_version_callback(self, value, expec_err, expec_exit_code):
        """version_callback(True) prints the version and exits; False is a no-op."""
        with sysu.std_redirect(sysu.STD.OUT) as stream:
            with pytest.raises(expec_err):
                version_callback(value)
                # Reached only when value is falsy (no Exit raised).
                raise sysu.Pass()
            stdout = stream.getvalue()
            if expec_err is Exit:
                assert __version__ in stdout
            else:
                assert not stdout
예제 #7
0
def dockerhub_versions(url: str) -> ArchVersions:
    """Collect available versions per architecture from a Dockerhub tag listing.

    Args:
        url: Dockerhub registry API endpoint returning a JSON tag listing.

    Returns:
        Mapping of architecture name to the versions found for it.
    """
    # A timeout prevents an unresponsive registry from hanging the caller
    # indefinitely (requests has no default timeout).
    dh_res = requests.get(url, timeout=5)
    dh_result = dh_res.json()
    out: ArchVersions = {arch: [] for arch in ARCHITECTURES}
    for release in dh_result["results"]:
        # Tag names look like "<version>[-<suffix>]"; keep only valid SemVer.
        version_str = release["name"].split("-")[0]
        if VersionInfo.isvalid(version_str):
            for image in release["images"]:
                arch = image["architecture"]
                if arch in ARCHITECTURES:
                    out[arch].append(to_version(version_str))
    logging.debug("Dockerhub versions=%s", out)
    return out
예제 #8
0
    def is_valid(string: str) -> bool:
        """Return whether a version is valid.

        Parameters
        ----------
        string : `str`
            The version as a string.

        Returns
        -------
        result : `bool`
            Whether the version is valid.
        """
        # Tolerate a leading "v" prefix (e.g. git tags like "v1.2.3").
        if string.startswith("v"):
            string = string[1:]
        return VersionInfo.isvalid(string)
예제 #9
0
    def __init__(self, *, workspace_id: str, name: str, terraform_version: str,
                 auto_apply: bool, is_locked: bool, working_directory: str,
                 agent_pool_id: str, execution_mode: str,
                 speculative: bool):  # pragma: no cover
        """Store workspace attributes and eagerly parse the Terraform version."""
        self.workspace_id = workspace_id
        self.name = name

        self.terraform_version = terraform_version
        # Remains None for non-SemVer strings (e.g. a "latest" sentinel).
        self.parsed_terraform_version: Optional[VersionInfo] = None
        if VersionInfo.isvalid(terraform_version):
            self.parsed_terraform_version = VersionInfo.parse(terraform_version)
        # Tracking the special "latest" version means the workspace auto-updates.
        self.is_auto_updating: bool = self.terraform_version == LATEST_VERSION

        self.auto_apply = auto_apply
        self.is_locked = is_locked
        self.working_directory = working_directory
        self.agent_pool_id = agent_pool_id
        self.execution_mode = execution_mode
        self.speculative = speculative
예제 #10
0
def sanitize_version(_ctx: Optional[click.Context],
                     _param: Optional[Union[click.Option, click.Parameter]],
                     value: str
                     ) -> str:
    """Sanitize a version number by stripping git tag ref and leading "v".

    To be used as the callback of a click option or parameter.

    Args:
        ctx: Click context object.
        param: The click option or parameter the callback is being used with.
        value: Value passed to the option or parameter from the CLI.

    Returns:
        str: The SemVer version number.

    """
    # Drop a git ref prefix, then an optional leading "v".
    version = value.replace('refs/tags/', '')
    version = version[1:] if version.startswith('v') else version
    if not VersionInfo.isvalid(version):
        raise ValueError(f'version of "{version}" does not follow SemVer')
    return version
예제 #11
0
def test_should_versioninfo_isvalid():
    """VersionInfo.isvalid accepts SemVer strings and rejects everything else."""
    accepted = VersionInfo.isvalid("1.0.0")
    rejected = VersionInfo.isvalid("foo")
    assert accepted is True
    assert rejected is False
예제 #12
0
def main(ids,arguments,nointeraction=False):
	"""Build the project described by ``sailboat.toml``.

	Args:
		ids: CLI tokens; ``ids[1]`` (optional) is either a literal SemVer
			string or a bump keyword (``maj``/``min``/``pat``/``pre``/``dev``).
		arguments: List of parsed ``(flag, value)`` option tuples.
		nointeraction: When True, skip confirmation prompts.
	"""
	# ============== Get Data ===============================================
	if not os.path.isfile('.'+os.sep+'sailboat.toml'):
		print('Please create a config file with `sailboat wizard` first.')
		sys.exit(0)
	try:
		data = toml.loads(open('.'+os.sep+'sailboat.toml').read())
	except toml.decoder.TomlDecodeError as e:
		print('Config error:\n\t'+str(e))
		exit()
	# ============== Get VersionInfo ===============================================
	if 'latest_build' not in data:
		data['latest_build'] = '0.0.1'
	if len(ids) >= 2: #Something provided
		if VersionInfo.isvalid(ids[1]):
			version = ids[1]
		elif ids[1].startswith('maj'):
			version = str(VersionInfo.parse(data['latest_build']).bump_major())
		elif ids[1].startswith('min'):
			version = str(VersionInfo.parse(data['latest_build']).bump_minor())
		elif ids[1].startswith('pat'):
			version = str(VersionInfo.parse(data['latest_build']).bump_patch())
		elif ids[1].startswith('pre') or ids[1].startswith('dev'):
			version = str(VersionInfo.parse(data['latest_build']).bump_prerelease())
		else:
			print('Unknown version `{}`'.format(ids[1]))
			sys.exit(0)
	else:
		# No version argument: derive a build metadata tag from the current
		# git commit, or bump the build counter if already on that commit.
		try:
			latestcommit = os.popen('git rev-parse --short HEAD').read().replace('\n','')
		except KeyboardInterrupt:
			latestcommit = "build"
		if latestcommit in data['latest_build']:
			version = str(VersionInfo.parse(data['latest_build']).bump_build())
		else:
			version = str(VersionInfo.parse(data['latest_build']).replace(build=latestcommit+".1"))
	# NOTE(review): ids[1] is read here even when len(ids) < 2 (the git-commit
	# branch above) — this raises IndexError on that path; confirm and guard.
	if compare(version,data['latest_build']) == -1 and not (ids[1].startswith('pre') or ids[1].startswith('dev')):
		if input(f'\u001b[31mYou are building a version ({version}) that comes before the previously built version ({data["latest_build"]}). Do you wish to continue? [y/n] \u001b[0m')[0]=='n' or nointeraction:
			print()
			sys.exit(0)
	print('\nPreparing to build version {}\n'.format(version))

	# ============== Pre-build script ===============================================
	if 'build_script' in data['build']:
		try:
			buildscript = __import__(data['build']['build_script'].replace('.py',''))
		except BaseException as e:
			print('Error with custom prebuild script:\n'+str(e))
			pass
	try:
		# buildscript may be unbound when no build_script is configured; the
		# broad except below deliberately swallows that NameError too.
		newdata = buildscript.pre(version,data)
		if isinstance(newdata,dict):
			data = newdata
	except BaseException as e:
		print('Error with custom prebuild script:\n\t`'+str(e)+"`")
		pass
	# ============== Show what will happen ===============================================
	print('This command will build the following:')
	print('\t- Generate a correct directory structure, setup.py, .gitignore, etc...')
	only=False
	for x in arguments:
		if '-only' in x[0]:
			only=True
			break
	# NOTE(review): donix is missing from this initializer, so the
	# `--*-only` branch can leave it unbound and `if donix:` below raises
	# NameError unless --unix-only was given — confirm.
	dopypi,dobrew,domac,dowin,doact,doset=False,False,False,False,False,False
	if only:
		if ('--pypi-only','') in arguments:
			dopypi = True
		elif ('--homebrew-only','') in arguments:
			dobrew = True
		elif ('--windows-only','') in arguments and sys.platform.startswith('win'):
			dowin = True
		elif ('--mac-only','') in arguments and sys.platform.startswith('darwin'):
			domac = True
		elif ('--actions-only','') in arguments:
			doact = True
		elif ('--setup-only','') in arguments:
			doset = True
		elif ('--unix-only','') in arguments:
			donix = True

	elif 'actions_only' in data['build'] and data['build']['actions_only'] and not ('CI' in os.environ and os.environ['CI']=="TRUE"):
		dopypi = True
		dobrew = True
		dowin = False
		domac = False
		donix = False
		doact = True
		doset = True
	else:
		dopypi = True
		dobrew = data['build']['homebrew']
		dowin = data['build']['windows'] and sys.platform.startswith('win')
		domac = data['build']['mac'] and sys.platform.startswith('darwin')
		donix = data['build']['unix'] and sys.platform.startswith('l')
		doact = data['build']['actions']
		doset =True
	if ('--no-installer','') in arguments:
		doinstall=False
	else:
		doinstall=data['build']['installer']
	if dopypi:
		print('\t- A distributable Python module.')
	if dobrew:
		print('\t- A Homebrew formula.')
	if dowin:
		installer = " with a .msi installer." if doinstall else "."
		print('\t- A Windows app'+installer)
	if domac:
		installer = " with a .dmg installer." if doinstall else "."
		print('\t- A Mac app'+installer)
	if donix:
		print('\t- A Unix executable')

	if doact:
		print('\t- A GitHub Actions file for building Mac and Windows apps and publishing a Github release.')
	if not nointeraction:
		input('Press enter to continue.')
	# ============== Create bin Script ===============================================
	try:
		os.mkdir('bin')
	except:
		pass
	bins = []
	# Generate a thin shell wrapper for each command with no explicit target.
	for commandname in data['build']['commands'].keys():
		if data['build']['commands'][commandname] == '':
			open("bin"+os.sep+commandname,'w+').write(f"#!"+os.sep+"usr"+os.sep+"bin"+os.sep+f"env bash\npython3 -m {data['short_name']} $@")
			bins.append('bin'+os.sep+commandname)
	# ============== Get module names ===============================================
	if 'no_import' not in data['resources']:
		data['resources']['no_import'] = []
	mods = []
	for x in glob.glob(data['short_name']+os.sep+'*.py'):
		f = open(x)
		mods += re.findall('(?m)(?:from[ ]+(\S+)[ ]+)?import[ ]+\S+?:[ ]+as[ ]+\S+?[ ]*$',f.read())
		f.close()
	modules = []
	# NOTE(review): with a single capture group, re.findall returns strings,
	# so x[1] indexes the second *character*, not a group — verify this regex
	# and indexing against real input.
	for x in mods:
		modules.append(x[1].split('.')[0])
	for module in set(modules):
		if module not in data['resources']['no_import'] and ( module!= data['short_name'] and module not in sys.builtin_module_names and module not in data['resources']['modules']):
			print('Checking for {} on PyPi...'.format(module))
			response = requests.get("https://pypi.python.org/pypi/{}/json".format(module))
			if response.status_code == 200:
				data['resources']['modules'].append(module)
			else:
				data['resources']['no_import'].append(module)

	# ============== Generate setup.py ===============================================
	print('\n\n\u001b[4m\u001b[1;36mGenerating setup.py\u001b[0m')

	if doset:
		if 'custom_setup' in data:
			cu=str(data['custom_setup'])
		else:
			cu=str({})
		with open(prefix+os.sep+'setup.py.template') as datafile:
			template = datafile.read()

		entries = []
		# Build console_scripts-style "name=pkg.module:func" entry points.
		for commandname in data['build']['commands'].keys():
			if data['build']['commands'][commandname]!="":
				modname = ".".join(data['build']['commands'][commandname].split('.')[:-1])
				funcname = data['build']['commands'][commandname].split('.')[-1]
				entries.append(commandname+"="+data["short_name"]+"."+modname+":"+funcname)
		try:
			# Strip build metadata (after "+") — not valid in a PEP 440 version.
			pyv = version.split('+')[0]
		except:
			pyv = version

		setup = template.format(
			**data,
			**data['resources'],
			cu=cu,
			bins=bins,
			version = pyv,
			entry_points = entries

		)
		open('setup.py','w+').write(setup)

	# ============== Generate directory structure ===============================================
	print('\n\n\u001b[4m\u001b[1;36mGenerating Directory Structure\u001b[0m')

	if not os.path.isfile('.gitignore'):
		open('.'+os.sep+'.gitignore','w+').write(open(prefix+os.sep+'gitignore.template').read().replace('/',os.sep))
	source_dir = os.getcwd()
	target_dir = data["short_name"]+os.sep
	types = ('*.py',*data['resources']["data_files"])
	file_names = []
	for files in types:
		file_names.extend(glob.glob(files))
	if not os.path.isdir(target_dir):
		os.mkdir(target_dir)
	try:
		bs = data['build']['build_script']
	except:
		# Sentinel that can never match a real filename below.
		bs = "RANDOM-----edfskjsdhflkjdhflksdjhflkasjdhflkasjdhflkasjdhflkajsdhflkjadshf"
	for file_name in file_names:
		if file_name in ("setup.py","sailboat.toml",bs):
			continue
		shutil.move(os.path.join(source_dir, file_name), target_dir+os.sep+file_name)
	for filename in glob.glob(target_dir+os.sep+'LICE*'):
		shutil.copyfile(filename,'LICENSE')
	if not os.path.isfile(target_dir+'__init__.py'):
		open(target_dir+'__init__.py','w+').write('# This file must exist, empty or not')
	if data['resources']['file']!="" and not os.path.isfile(data['short_name']+os.sep+'__main__.py'):
		try:
			os.rename(data['short_name']+os.sep+data['resources']['file'],data['short_name']+os.sep+'__main__.py')
			open(data['short_name']+os.sep+data['resources']['file'],'w+').write('# Please edit __main__.py for the main code. Thanks!\n(you can delete this file.)')
		except FileNotFoundError:
			pass

	# ============== Generate pypi files ===============================================
	print('\n\n\u001b[4m\u001b[1;36mGenerating PyPi files...\u001b[0m')

	if dopypi:
		try:
			shutil.rmtree('dist')
		except:
			pass
		os.system('python3 .'+os.sep+'setup.py bdist_wheel sdist -d dist'+os.sep+'pypi')
		try:
			shutil.rmtree('build')
		except:
			pass
		for x in glob.glob('dist'+os.sep+'*.whl'):
			os.rename(x,x.replace('dist'+os.sep,'dist'+os.sep+'pypi'+os.sep))
		for x in glob.glob('*.egg-info'):
			shutil.rmtree(x)

	# ============== Generate homebrew file ===============================================
	print('\n\n\u001b[4m\u001b[1;36mGenerating Homebrew file...\u001b[0m')

	if dobrew:
		retmp = '   resource "{name}" do\n      url "{url}"\n      sha256 "{sha256}"\n   end\n'
		resources = ''
		for modulename in data['resources']['modules']:
			req = requests.get('https://pypi.org/pypi/{}/json'.format(modulename)).json()
			versionPy = req['info']['version']
			url = req['releases'][versionPy][0]['url']
			sha256 = req['releases'][versionPy][0]['digests']['sha256']
			# Homebrew needs a source archive; fall back to the second release
			# file when the first is a wheel.
			if not (url.endswith('.tar.gz') or url.endswith('.zip')):
				try:
					url = req['releases'][versionPy][1]['url']
					sha256 = req['releases'][versionPy][1]['digests']['sha256']
				except:
					continue
			resources+=retmp.format(name=modulename,url=url,sha256=sha256)
		os.makedirs('dist'+os.sep+'homebrew')
		f = open('dist'+os.sep+'homebrew'+os.sep+'{name}.rb'.format(name=data['name']),'w+')
		f.write(open(prefix+os.sep+'brew.rb').read().format(
			**data,
			resources2 = resources,
			version = version
		))
		f.close()
	# ============== Generate w/Pyinstaller ===============================================
	print('\n\n\u001b[4m\u001b[1;36mGenerating Pyinstaller files...\u001b[0m')

	# domac = True
	if dowin or domac or donix:
		try:
			import PyInstaller.__main__
		except:
			print('Installing PyInstaller...')
			os.system('pip3 install pyinstaller')
			import PyInstaller.__main__

		mods = []
		for x in data['resources']['modules']:
			mods.append('--hidden-import')
			mods.append(x)

		datafiles = []
		for x in data['resources']['data_files']:
			for g in glob.glob(data['short_name']+os.sep+x):
				datafiles.append('--add-data')
				datafiles.append(g+os.pathsep+g.replace(data['short_name']+os.sep,''))
		typ = '--nowindowed' if data['build']['type']=='1' else '--noconsole'

		ico = ['--icon',data['resources']['icon']] if 'icon' in data['resources'] else []
		options = [
			data['short_name']+os.sep+'__main__.py',
			'--onefile',
			'--name',
			data['name'] if ('-n','')not in arguments else 'app',
			'--distpath',
			'.'+os.sep+'dist'+os.sep+'pyinstaller',
			*mods,
			*datafiles,
			typ,
			*ico,
			'--osx-bundle-identifier',
			data['build']['bundle_id']
		]
		print(options)
		PyInstaller.__main__.run(options)
		try:
			print('removing '+data['name']+".spec...")
			os.remove(data['name']+".spec")
		except:
			print('removing app.spec...')
			os.remove("app.spec")
	# ============== Mac .app Bundle ===============================================
	print('\n\n\u001b[4m\u001b[1;36mGenerating Mac .app bundle...\u001b[0m')
	if domac:
		# Assemble the minimal .app layout: Contents/{MacOS,Resources,Info.plist}.
		os.chdir('dist')

		os.mkdir(data['name'])
		os.chdir(data['name'])

		os.mkdir('Contents')
		os.chdir('Contents')

		os.mkdir('MacOS')
		os.mkdir('Resources')

		infoPlist = open('Info.plist','w+')
		infoPlist.write(open(prefix+'/info.plist.xml').read().format(
			**data,
			**data['build'],
			version = version
		))
		infoPlist.close()

		shutil.copy('./../../pyinstaller/'+data['name'],'MacOS')
		os.chdir('./../../..')

		os.rename('./dist/'+data['name'],'./dist/'+data['name']+".app")
	else:
		print('not generating mac .app bundle because on {} not mac.'.format(sys.platform))
	# ============== Generate Installer Package ===============================================
	print('\n\n\u001b[4m\u001b[1;36mGenerating Installer Package...\u001b[0m')

	# NOTE(review): the `and False` makes this Windows branch unreachable —
	# presumably disabled on purpose; confirm before removing.
	if sys.platform.startswith('win') and doinstall and False:#WINDOWS
		os.system('pip install distro')
		os.system('pip install git+https://github.com/x24git/wixpy')
		d = open(prefix+os.sep+'wixpy.template.json').read().format(
			**data,
			version=version,
			icns=data['resources']['icon'],
			keywo=", ".join(data['keywords'])
		)
		open('wixpy.json','w+').write(d)
		print(d)
		print('STARTING WIXPY...')
		os.system('wix.py wixpy.json')

	elif sys.platform.startswith('darwin'):#MAC
		try:
			import dmgbuild
		except:
			print('Installing dmgbuild')
			os.system('pip3 install dmgbuild')
			import dmgbuild
		del dmgbuild
		if not os.path.isdir('build'):
			os.mkdir('build')
		open('build'+os.sep+'settings.py','w+').write(open(prefix+os.sep+'settings.py.template').read().format(
			**data,
			version=version,
			icns=data['resources']['icon'],
			keywo=", ".join(data['keywords'])
		))
		os.system(f'cat build/settings.py;dmgbuild -s .{os.sep}build{os.sep}settings.py "{data["name"]} Installer" ./{data["name"]}.dmg')

	else:
		print(f'Installer creation not yet supported for {sys.platform}!')
	# ============== Generate Github Actions Workflow ===============================================
	print('\n\n\u001b[4m\u001b[1;36mGenerating GitHub Actions File...\u001b[0m')
	if doact:
		try:
			oldact = open('.github'+os.sep+'workflows'+os.sep+'sailboat.yml').read().split('\n')[0]
		except:
			oldact="\n"
		try:
			f = open('.github'+os.sep+'workflows'+os.sep+'sailboat.yml','w+')
		except:
			os.system('mkdir -p .github'+os.sep+'workflows'+os.sep)
			f = open('.github'+os.sep+'workflows'+os.sep+'sailboat.yml','w+')
		# NOTE(review): win_ext is ".exe" in both branches — the installer
		# branch likely intended ".msi"; confirm against the template.
		newdata = open(prefix+os.sep+'sailboat.yml.template').read().format(
			**data,
			mac=""if data['build']['mac']else"#",
			windows=""if data['build']['windows']else"#",
			win_ext=".exe"if data['build']['installer']else".exe",
			mac_ext=".dmg"if not data['build']['unix']else"",
			u=""if data['build']['unix']else"#"
		).replace('\t','  ')
		f.write(newdata)

		f.close()
		# Compare first lines to detect whether the workflow actually changed.
		newact = open('.github'+os.sep+'workflows'+os.sep+'sailboat.yml').read().split('\n')[0]
		data['build']['actions_built_latest'] = newact != oldact
		print(oldact)
		print(newact)
	# ============== Post build ===============================================
	try:
		newdata = buildscript.post(version,data)
		if isinstance(newdata,dict):
			data = newdata
	except:
		pass
	# ============== Save VersionInfo ===============================================
	data['latest_build'] = version
	open('sailboat.toml','w+').write(toml.dumps(data))
	os.system('python .'+os.sep+'setup.py develop')
예제 #13
0
    def run(self, **kwargs):
        """Build the project: pick a version, stage files, scan imports, then
        dispatch each configured build plugin in dependency order.

        Reads/writes ``self.data`` (the parsed sailboat.toml) and consumes
        ``self.options`` (positional CLI tokens).
        """
        manager = enlighten.get_manager()
        term = blessed.Terminal()
        if len(self.options) > 0 and self.options[0] == 'help':
            print(
                "usage: sail build [version (or) increment] [plugins ...]\n\tThis command builds your project using the "
                "sailboat.toml file.\n\tValid options for version:\n\t\t- Valid semver.org string: set that as "
                "version\n\t\t- `major`: increments the major version by one.\n\t\t- `minor`: increments the minor "
                "version by one.\n\t\t- `patch`: increments the patch version by one.\n\t\t- `pre`: increments the "
                "prerelease version by one.\n\t\t- None: increments build version by one.")
            return
        # Get Version =====================================================================================
        if 'latest_build' not in self.data:
            self.data['latest_build'] = '0.0.1'
        if len(self.options) >= 1:  # Something provided
            if VersionInfo.isvalid(self.options[0]):
                version = self.options[0]
            elif self.options[0].startswith('maj'):
                version = str(VersionInfo.parse(self.data['latest_build']).bump_major())
            elif self.options[0].startswith('min'):
                version = str(VersionInfo.parse(self.data['latest_build']).bump_minor())
            elif self.options[0].startswith('pat'):
                version = str(VersionInfo.parse(self.data['latest_build']).bump_patch())
            elif self.options[0].startswith('+') or self.options[0].startswith('build'):
                version = str(VersionInfo.parse(self.data['latest_build']).bump_build())
            else:
                print('Unknown version `{}`'.format(self.options[0]))
                return
            # NOTE(review): this overrides even an explicitly supplied valid
            # SemVer containing ".pre"/".dev" with a bump of the *previous*
            # build — confirm that is intended.
            if '.pre' in self.options[0] or '.dev' in self.options[0]:
                version = str(VersionInfo.parse(self.data['latest_build']).bump_prerelease())

        else:
            # No version argument: derive build metadata from the current git
            # commit, bumping the counter when already on that commit.
            try:
                latestcommit = os.popen('git rev-parse --short HEAD').read().replace('\n', '')
            except KeyboardInterrupt:
                latestcommit = "build"
            if latestcommit in self.data['latest_build']:
                version = str(VersionInfo.parse(self.data['latest_build']).bump_build())
            else:
                version = str(VersionInfo.parse(self.data['latest_build']).replace(build=latestcommit + ".1"))
        # NOTE(review): self.options[0] is read here even when no options were
        # given (the git path above) — IndexError on that path; confirm/guard.
        if compare(version, self.data['latest_build']) == -1 and not (
                self.options[0].startswith('pre') or self.options[0].startswith('dev')):
            if input(
                    term.red + f'You are building a version ({version}) that comes before the previously built version ({self.data["latest_build"]}). Do you wish to continue? [y/n]' + term.normal)[
                0] == 'n' or ('-y' in self.options or '--no-interaction' in self.options):
                print()
                return
        status_format = '{program}{fill}{current}{fill}{version}'
        status_bar = manager.status_bar(status_format=status_format, color='white_on_blue', program=self.data['name'],
                                        current='building directory structure', version=version)
        print('\nPreparing to build version {}\n'.format(version))
        self.data['latest_build'] = version
        # No explicit plugin list means: run every configured build plugin.
        if len(self.options[1:]) == 0:
            notdones = [*self.data['build'].keys()]
        else:
            notdones = self.options[1:]
        progress_bar = manager.counter(total=len(notdones) + 3, desc='Build', unit='jobs', color="grey")
        prebuild = progress_bar.add_subcounter('white')
        postbuild = progress_bar.add_subcounter('darkgrey')
        # =====================================================================================
        if not os.path.isfile('.gitignore'):
            open('.' + os.sep + '.gitignore', 'w+', encoding="utf8").write(
                self.getResource('resources' + os.sep + 'gitignore.template').read().replace('/', os.sep))
        source_dir = os.getcwd()
        target_dir = self.data["short_name"] + os.sep
        types = ('*.py', *self.data['resources']["data_files"])
        file_names = []
        for files in types:
            file_names.extend(glob.glob(files))
        if not os.path.isdir(target_dir):
            os.mkdir(target_dir)
        for file_name in file_names:
            if file_name in ("setup.py", "sailboat.toml", self.data['resources']['file']):
                continue
            shutil.move(os.path.join(source_dir, file_name), target_dir + os.sep + file_name)
        for filename in glob.glob(target_dir + os.sep + 'LICE*'):
            shutil.copyfile(filename, 'LICENSE')
        if not os.path.isfile(target_dir + '__init__.py'):
            open(target_dir + '__init__.py', 'w+', encoding="utf8").write('# This file must exist, empty or not')
        if self.data['resources']['file'] != "" and not os.path.isfile(
                self.data['short_name'] + os.sep + '__main__.py') and os.path.isfile(self.data['resources']['file']):
            try:
                os.rename(self.data['resources']['file'], self.data['short_name'] + os.sep + '__main__.py')
            except:
                pass
        time.sleep(0.1);
        status_bar.update(current="scanning imports")
        prebuild.update()
        # =====================================================================================
        print('Scanning module imports...')
        if 'no_import' not in self.data['resources']:
            self.data['resources']['no_import'] = []
        mods = []
        # Collect top-level `import X` / `from X import ...` targets.
        for x in glob.glob(self.data['short_name'] + os.sep + '*.py'):
            f = open(x, encoding="utf8")
            b = f.read()
            f.close()
            mods += re.findall('^import[ ]+(.*)', b, re.M)
            mods += re.findall('^from[ ]+(.*) import', b, re.M)
        modules = []
        for x in set(mods):
            modules.append(x.split('.')[0])
        # Classify each unknown import as a PyPI dependency or a local module.
        for module in set(modules):
            if module not in self.data['resources']['no_import'] and (
                    module != self.data['short_name'] and module not in sys.builtin_module_names and module not in
                    self.data['resources']['modules']):
                print('Checking for {} on PyPi...'.format(module))
                response = requests.get("https://pypi.python.org/pypi/{}/json".format(module))
                if response.status_code == 200:
                    self.data['resources']['modules'].append(module)
                else:
                    self.data['resources']['no_import'].append(module)
        time.sleep(0.1);
        status_bar.update(current="removing previous builds")
        prebuild.update()
        # =====================================================================================
        try:
            shutil.rmtree('dist')
        except FileNotFoundError:
            pass
        dones = []
        # NOTE(review): notdones is appended to while being iterated (to defer
        # a plugin until its `_needs` run) — appears intentional, but verify
        # the progress-bar wrapper tolerates a growing sequence.
        for build_plugin in progress_bar(notdones):
            if build_plugin not in self.options and (
                    '_run' in self.data['build'][build_plugin] and not self.data['build'][build_plugin]['_run']):
                continue
            if build_plugin in dones:
                continue
            if build_plugin not in self.data['build'].keys():
                continue
            elif '_needs' in self.data['build'][build_plugin]:
                if isinstance(self.data['build'][build_plugin]['_needs'], str):
                    self.data['build'][build_plugin]['_needs'] = [self.data['build'][build_plugin]['_needs']]
                for x in self.data['build'][build_plugin]['_needs']:
                    if x not in dones:
                        notdones.append(build_plugin)
                        build_plugin = x
            print(term.cyan + term.underline + build_plugin + term.normal + term.nounderline + "\n\n")
            time.sleep(0.2);
            status_bar.update(current=build_plugin)
            try:
                plugin_type, job = get_plugin(build_plugin, plugin_type="build")
                job = job(
                    data=self.data,
                    options=[],
                    name=build_plugin,
                    prefix=self.prefix,
                    version=version
                )
            except PluginNotFound:
                sys.exit(f'You seem to have added the {build_plugin} plugin, but it does not appear to be installed!')
            try:
                job.run()
            except KeyboardInterrupt:
                print('\n\nUser has aborted at step {}.\n\n'.format(build_plugin))
                sys.exit(0)
            except BaseException as error:
                # NOTE(review): traceback.print_exc() prints and returns None,
                # so self.red(...) formats "None" here — likely meant
                # traceback.format_exc(); confirm.
                print('\n\nError at step {}:\n\n\t{}\n\n'.format(build_plugin, self.red(traceback.print_exc())))
                sys.exit(1)
            self.data[job._type][build_plugin] = job.data[job._type][build_plugin]
            dones.append(build_plugin)
        time.sleep(0.1);
        status_bar.update(current="running develop")
        print(self.section('Finishing up...'))
        os.system('python3 setup.py develop')
        postbuild.update()
        print(self.section('Built files:'))
        for x in glob.glob(f'.{os.sep}dist{os.sep}*{os.sep}*') +\
                 glob.glob(f'.{os.sep}dist{os.sep}*'):
            print(x)
        time.sleep(0.2);
        status_bar.update(current='Finished Build!')
        manager.stop()
# Example #14
    def run(self, plugins, **kwargs):
        """Build the project described by ``sailboat.toml``.

        The first option selects how the build version is derived:

        * a valid semver string -- used as-is;
        * a ``major``/``minor``/``patch``/``pre``/``dev``/``build`` (or
          ``+``) prefix -- bump the corresponding part of ``latest_build``;
        * nothing -- derive a build identifier from the latest git commit.

        Any remaining options name specific build plugins to run; otherwise
        every plugin listed under ``[build]`` in the project data is run.

        :param plugins: plugin registry; ``plugins['build'][name]['dist']``
            is the distribution whose entry point provides each build job.
        """
        if len(self.options) > 0 and self.options[0] == 'help':
            print(
                "usage: sail build [version (increment)]\n\tThis command builds your project using the sailboat.toml file.\n\tValid options for version:\n\t\t- Valid semver.org string: set that as version\n\t\t- `major`: increments the major version by one.\n\t\t- `minor`: increments the minor version by one.\n\t\t- `patch`: increments the patch version by one.\n\t\t- `pre`: increments the prerelease version by one.\n\t\t- None: increments build version by one."
            )
            return
        # ===== Work out the version number ==================================
        if 'latest_build' not in self.data:
            self.data['latest_build'] = '0.0.1'
        if len(self.options) >= 1:  # Something provided
            if VersionInfo.isvalid(self.options[0]):
                version = self.options[0]
            elif self.options[0].startswith('maj'):
                version = str(
                    VersionInfo.parse(self.data['latest_build']).bump_major())
            elif self.options[0].startswith('min'):
                version = str(
                    VersionInfo.parse(self.data['latest_build']).bump_minor())
            elif self.options[0].startswith('pat'):
                version = str(
                    VersionInfo.parse(self.data['latest_build']).bump_patch())
            elif self.options[0].startswith(
                    'pre') or self.options[0].startswith('dev'):
                version = str(
                    VersionInfo.parse(
                        self.data['latest_build']).bump_prerelease())
            elif self.options[0].startswith('+') or self.options[0].startswith(
                    'build'):
                version = str(
                    VersionInfo.parse(self.data['latest_build']).bump_build())
            else:
                print('Unknown version `{}`'.format(self.options[0]))
                return
        else:
            # No option given: tag the build with the current git commit.
            try:
                latestcommit = os.popen(
                    'git rev-parse --short HEAD').read().replace('\n', '')
            except KeyboardInterrupt:
                latestcommit = "build"
            if latestcommit in self.data['latest_build']:
                version = str(
                    VersionInfo.parse(self.data['latest_build']).bump_build())
            else:
                version = str(
                    VersionInfo.parse(self.data['latest_build']).replace(
                        build=latestcommit + ".1"))
        # BUGFIX: guard `self.options[0]` -- previously raised IndexError when
        # no options were given and the derived version compared older.
        if compare(version, self.data['latest_build']) == -1 and not (
                self.options and (self.options[0].startswith('pre')
                                  or self.options[0].startswith('dev'))):
            # BUGFIX: `-y`/`--no-interaction` used to force an abort (and the
            # prompt was shown regardless); now it skips the prompt entirely.
            if ('-y' not in self.options
                    and '--no-interaction' not in self.options):
                answer = input(
                    f'\u001b[31mYou are building a version ({version}) that comes before the previously built version ({self.data["latest_build"]}). Do you wish to continue? [y/n] \u001b[0m'
                )
                # `[:1]` instead of `[0]` so an empty reply doesn't crash.
                if answer[:1] == 'n':
                    print()
                    return
        print('\nPreparing to build version {}\n'.format(version))
        self.data['latest_build'] = version
        # ===== Generate the directory structure =============================
        print(
            '\n\n\u001b[4m\u001b[1;36mGenerating Directory Structure\u001b[0m')
        print(
            'This step will generate a correct directory structure for your project.'
        )
        if not os.path.isfile('.gitignore'):
            open('.' + os.sep + '.gitignore', 'w+').write(
                self.getResource('resources' + os.sep +
                                 'gitignore.template').read().replace(
                                     '/', os.sep))
        source_dir = os.getcwd()
        target_dir = self.data["short_name"] + os.sep
        # Every *.py file plus any declared data files gets moved into the
        # package directory (except the project's own metadata files).
        types = ('*.py', *self.data['resources']["data_files"])
        file_names = []
        for files in types:
            file_names.extend(glob.glob(files))
        if not os.path.isdir(target_dir):
            os.mkdir(target_dir)
        for file_name in file_names:
            if file_name in ("setup.py", "sailboat.toml",
                             self.data['resources']['file']):
                continue
            shutil.move(os.path.join(source_dir, file_name),
                        target_dir + os.sep + file_name)
        for filename in glob.glob(target_dir + os.sep + 'LICE*'):
            shutil.copyfile(filename, 'LICENSE')
        if not os.path.isfile(target_dir + '__init__.py'):
            open(target_dir + '__init__.py',
                 'w+').write('# This file must exist, empty or not')
        if self.data['resources']['file'] != "" and not os.path.isfile(
                self.data['short_name'] + os.sep +
                '__main__.py') and os.path.isfile(
                    self.data['resources']['file']):
            try:
                os.rename(self.data['resources']['file'],
                          self.data['short_name'] + os.sep + '__main__.py')
            except OSError:
                # Best effort: keep the original entry file if it can't move.
                pass
        # ===== Scan imports and resolve them against PyPI ===================
        print('\n\n\u001b[4m\u001b[1;36mFetching Modules:\u001b[0m')
        print('Scanning module imports...')
        if 'no_import' not in self.data['resources']:
            self.data['resources']['no_import'] = []
        mods = []
        for x in glob.glob(self.data['short_name'] + os.sep + '*.py'):
            f = open(x)
            b = f.read()
            f.close()
            mods += re.findall('^import[ ]+(.*)', b, re.M)
            mods += re.findall('^from[ ]+(.*) import', b, re.M)
        modules = []
        for x in set(mods):
            # Only the top-level package name matters for PyPI lookup.
            modules.append(x.split('.')[0])
        for module in set(modules):
            if module not in self.data['resources']['no_import'] and (
                    module != self.data['short_name']
                    and module not in sys.builtin_module_names
                    and module not in self.data['resources']['modules']):
                print('Checking for {} on PyPi...'.format(module))
                response = requests.get(
                    "https://pypi.python.org/pypi/{}/json".format(module))
                if response.status_code == 200:
                    self.data['resources']['modules'].append(module)
                else:
                    self.data['resources']['no_import'].append(module)
        # ===== Run the build plugins ========================================
        try:
            shutil.rmtree('dist')
        except FileNotFoundError:
            pass
        dones = []
        if len(self.options[1:]) == 0:
            notdones = [*self.data['build'].keys()]
        else:
            notdones = self.options[1:]
        # NOTE: `notdones` is deliberately appended to while iterating -- a
        # plugin with unmet `_needs` is re-queued after its dependency runs.
        for build_plugin in notdones:
            if build_plugin not in self.options and (
                    '_run' in self.data['build'][build_plugin]
                    and not self.data['build'][build_plugin]['_run']):
                continue
            if build_plugin in dones:
                continue
            if build_plugin not in self.data['build'].keys():
                continue
            elif '_needs' in self.data['build'][build_plugin]:
                if isinstance(self.data['build'][build_plugin]['_needs'], str):
                    self.data['build'][build_plugin]['_needs'] = [
                        self.data['build'][build_plugin]['_needs']
                    ]
                for x in self.data['build'][build_plugin]['_needs']:
                    if x not in dones:
                        notdones.append(build_plugin)
                        build_plugin = x
            print('\n\u001b[4m\u001b[1;36m{}:\u001b[0m'.format(
                build_plugin.title()))
            try:
                dist = plugins['build'][build_plugin]['dist']
                job = pkg_resources.load_entry_point(dist, 'sailboat_plugins',
                                                     build_plugin)
                job = job(data=self.data,
                          options=[],
                          name=build_plugin,
                          prefix=self.prefix,
                          version=version)
            except KeyError:
                # BUGFIX: report the plugin name, not the KeyError's repr
                # (which rendered as a quoted dict key).
                sys.exit(
                    f'You seem to have added the {build_plugin} plugin, but it does not appear to be installed!'
                )
            try:
                job.run()
            except KeyboardInterrupt:
                print('\n\nUser has aborted at step {}.\n\n'.format(
                    build_plugin))
                return
            except BaseException as error:
                print('\n\nError at step {}:\n\n\t{}\n\n'.format(
                    build_plugin, self.red(error)))
                return
            self.data[job._type][build_plugin] = job.data[
                job._type][build_plugin]
            dones.append(build_plugin)
        # ===== Finish up ====================================================
        print(self.section('Finishing up...'))
        os.system('python3 setup.py develop')
        print(self.section('Built files:'))
        for x in glob.glob(f'.{os.sep}dist{os.sep}*{os.sep}*') + glob.glob(
                f'.{os.sep}dist{os.sep}*'):
            if os.path.isfile(x):
                print(x)
# Example #15
    async def run(self):
        logger = self.runtime.logger

        # Load group config and releases.yml
        logger.info("Loading build data...")
        group_config = await util.load_group_config(self.group, self.assembly, env=self._doozer_env_vars)
        releases_config = await util.load_releases_config(self._doozer_working_dir / "ocp-build-data")
        if releases_config.get("releases", {}).get(self.assembly) is None:
            raise ValueError(f"To promote this release, assembly {self.assembly} must be explictly defined in releases.yml.")
        permits = util.get_assembly_promotion_permits(releases_config, self.assembly)

        # Get release name
        assembly_type = util.get_assembly_type(releases_config, self.assembly)
        release_name = util.get_release_name(assembly_type, self.group, self.assembly, self.release_offset)
        # Ensure release name is valid
        if not VersionInfo.isvalid(release_name):
            raise ValueError(f"Release name `{release_name}` is not a valid semver.")
        logger.info("Release name: %s", release_name)

        self._slack_client.bind_channel(release_name)
        slack_response = await self._slack_client.say(f"Promoting release `{release_name}` @release-artists")
        slack_thread = slack_response["message"]["ts"]

        justifications = []
        try:
            # Get arches
            arches = self.arches or group_config.get("arches", [])
            arches = list(set(map(brew_arch_for_go_arch, arches)))
            if not arches:
                raise ValueError("No arches specified.")
            # Get previous list
            upgrades_str: Optional[str] = group_config.get("upgrades")
            if upgrades_str is None and assembly_type != assembly.AssemblyTypes.CUSTOM:
                raise ValueError(f"Group config for assembly {self.assembly} is missing the required `upgrades` field. If no upgrade edges are expected, please explicitly set the `upgrades` field to empty string.")
            previous_list = list(map(lambda s: s.strip(), upgrades_str.split(","))) if upgrades_str else []
            # Ensure all versions in previous list are valid semvers.
            if any(map(lambda version: not VersionInfo.isvalid(version), previous_list)):
                raise ValueError("Previous list (`upgrades` field in group config) has an invalid semver.")

            # Check for blocker bugs
            if self.skip_blocker_bug_check or assembly_type in [assembly.AssemblyTypes.CANDIDATE, assembly.AssemblyTypes.CUSTOM]:
                logger.info("Blocker Bug check is skipped.")
            else:
                logger.info("Checking for blocker bugs...")
                # TODO: Needs an option in releases.yml to skip this check
                try:
                    await self.check_blocker_bugs()
                except VerificationError as err:
                    logger.warn("Blocker bugs found for release: %s", err)
                    justification = self._reraise_if_not_permitted(err, "BLOCKER_BUGS", permits)
                    justifications.append(justification)
                logger.info("No blocker bugs found.")

            # If there are CVEs, convert RHBAs to RHSAs and attach CVE flaw bugs
            impetus_advisories = group_config.get("advisories", {})
            futures = []
            for impetus, advisory in impetus_advisories.items():
                if not advisory:
                    continue
                if advisory < 0:  # placeholder advisory id is still in group config?
                    raise ValueError("Found invalid %s advisory %s", impetus, advisory)
                logger.info("Attaching CVE flaws for %s advisory %s...", impetus, advisory)
                futures.append(self.attach_cve_flaws(advisory))
            try:
                await asyncio.gather(*futures)
            except ChildProcessError as err:
                logger.warn("Error attaching CVE flaw bugs: %s", err)
                justification = self._reraise_if_not_permitted(err, "CVE_FLAWS", permits)
                justifications.append(justification)

            # Attempt to move all advisories to QE
            futures = []
            for impetus, advisory in impetus_advisories.items():
                if not advisory:
                    continue
                logger.info("Moving advisory %s to QE...", advisory)
                futures.append(self.change_advisory_state(advisory, "QE"))
            try:
                await asyncio.gather(*futures)
            except ChildProcessError as err:
                logger.warn("Error moving advisory %s to QE: %s", advisory, err)

            # Ensure the image advisory is in QE (or later) state.
            image_advisory = impetus_advisories.get("image", 0)
            errata_url = ""

            if assembly_type == assembly.AssemblyTypes.STANDARD:
                if image_advisory <= 0:
                    err = VerificationError(f"No associated image advisory for {self.assembly} is defined.")
                    justification = self._reraise_if_not_permitted(err, "NO_ERRATA", permits)
                    justifications.append(justification)
                else:
                    logger.info("Verifying associated image advisory %s...", image_advisory)
                    image_advisory_info = await self.get_advisory_info(image_advisory)
                    try:
                        self.verify_image_advisory(image_advisory_info)
                        live_id = self.get_live_id(image_advisory_info)
                        assert live_id
                        errata_url = f"https://access.redhat.com/errata/{live_id}"  # don't quote
                    except VerificationError as err:
                        logger.warn("%s", err)
                        justification = self._reraise_if_not_permitted(err, "INVALID_ERRATA_STATUS", permits)
                        justifications.append(justification)

            # Verify attached bugs
            advisories = list(filter(lambda ad: ad > 0, impetus_advisories.values()))
            if advisories:
                if self.skip_attached_bug_check:
                    logger.info("Skip checking attached bugs.")
                else:
                    logger.info("Verifying attached bugs...")
                    try:
                        await self.verify_attached_bugs(advisories)
                    except ChildProcessError as err:
                        logger.warn("Error verifying attached bugs: %s", err)
                        justification = self._reraise_if_not_permitted(err, "ATTACHED_BUGS", permits)
                        justifications.append(justification)

            # Promote release images
            futures = []
            metadata = {}
            description = group_config.get("description")
            if description:
                logger.warning("The following description message will be included in the metadata of release image: %s", description)
                metadata["description"] = str(description)
            if errata_url:
                metadata["url"] = errata_url
            reference_releases = util.get_assmebly_basis(releases_config, self.assembly).get("reference_releases", {})
            tag_stable = assembly_type in [assembly.AssemblyTypes.STANDARD, assembly.AssemblyTypes.CANDIDATE]
            release_infos = await self.promote_all_arches(release_name, arches, previous_list, metadata, reference_releases, tag_stable)
            self._logger.info("All release images for %s have been promoted.", release_name)

            # Wait for release controllers
            pullspecs = list(map(lambda r: r["image"], release_infos))
            if not tag_stable:
                self._logger.warning("This release will not appear on release controllers. Pullspecs: %s", release_name, ", ".join(pullspecs))
                await self._slack_client.say(f"Release {release_name} is ready. It will not appear on the release controllers. Please tell the user to manually pull the release images: {', '.join(pullspecs)}", slack_thread)
            else:  # Wait for release images to be accepted by the release controllers
                self._logger.info("All release images for %s have been successfully promoted. Pullspecs: %s", release_name, ", ".join(pullspecs))

                # check if release is already accepted (in case we timeout and run the job again)
                accepted = []
                for arch in arches:
                    go_arch_suffix = util.go_suffix_for_arch(arch)
                    release_stream = f"4-stable{go_arch_suffix}"
                    accepted.append(await self.is_accepted(release_name, arch, release_stream))

                if not all(accepted):
                    self._logger.info("Determining upgrade tests...")
                    test_commands = self._get_upgrade_tests_commands(release_name, previous_list)
                    message = f"""A new release `{release_name}` is ready and needs some upgrade tests to be triggered.
Please open a chat with @cluster-bot and issue each of these lines individually:
{os.linesep.join(test_commands)}
        """
                    await self._slack_client.say(message, slack_thread)

                    self._logger.info("Waiting for release images for %s to be accepted by the release controller...", release_name)
                    futures = []
                    for arch in arches:
                        go_arch_suffix = util.go_suffix_for_arch(arch)
                        release_stream = f"4-stable{go_arch_suffix}"
                        futures.append(self.wait_for_stable(release_name, arch, release_stream))
                    try:
                        await asyncio.gather(*futures)
                    except RetryError as err:
                        message = f"Timeout waiting for release to be accepted by the release controllers: {err}"
                        self._logger.error(message)
                        self._logger.exception(err)
                        raise TimeoutError(message)

                self._logger.info("All release images for %s have been accepted by the release controllers.", release_name)

                message = f"Release `{release_name}` has been accepted by the release controllers."
                await self._slack_client.say(message, slack_thread)

                # Send image list
                if not image_advisory:
                    self._logger.warning("No need to send an advisory image list because this release doesn't have an image advisory.")
                elif assembly_type == assembly.AssemblyTypes.CANDIDATE:
                    self._logger.warning("No need to send an advisory image list for a candidate release.")
                elif self.skip_image_list:
                    self._logger.warning("Skip sending advisory image list")
                else:
                    self._logger.info("Gathering and sending advisory image list...")
                    mail_dir = self._working_dir / "email"
                    await self.send_image_list_email(release_name, image_advisory, mail_dir)
                    self._logger.info("Advisory image list sent.")

        except Exception as err:
            self._logger.exception(err)
            error_message = f"Error promoting release {release_name}: {err}\n {traceback.format_exc()}"
            message = f"Promoting release {release_name} failed with: {error_message}"
            await self._slack_client.say(message, slack_thread)
            raise

        # Print release infos to console
        data = {
            "group": self.group,
            "assembly": self.assembly,
            "type": assembly_type.value,
            "name": release_name,
            "content": {},
            "justifications": justifications,
        }
        if image_advisory > 0:
            data["advisory"] = image_advisory
        if errata_url:
            data["live_url"] = errata_url
        for arch, release_info in zip(arches, release_infos):
            data["content"][arch] = {
                "pullspec": release_info["image"],
                "digest": release_info["digest"],
            }
            from_release = release_info.get("references", {}).get("metadata", {}).get("annotations", {}).get("release.openshift.io/from-release")
            if from_release:
                data["content"][arch]["from_release"] = from_release
            rhcos = next((t for t in release_info.get("references", {}).get("spec", {}).get("tags", []) if t["name"] == "machine-os-content"), None)
            if rhcos:
                rhcos_version = rhcos["annotations"]["io.openshift.build.versions"].split("=")[1]  # machine-os=48.84.202112162302-0 => 48.84.202112162302-0
                data["content"][arch]["rhcos_version"] = rhcos_version

        json.dump(data, sys.stdout)