def create_release_tag(info, merge):
    """Create an annotated git tag for the release.

    Checks out the branch the tag should point at (the target branch
    when merging, otherwise the dev branch), shows the tip commit,
    optionally asks for confirmation, then tags and returns to the
    branch that was checked out originally. Aborts if the user
    declines to tag.
    """
    print_step_header(f"Tagging {info.name} release", info.version)
    original_branch = get_current_branch()
    # When merging, the tag belongs on the merge commit on the target
    # branch; otherwise it goes on the prepare commit on the dev branch.
    branch_to_tag = info.target_branch if merge else info.dev_branch
    local(("git", "checkout", branch_to_tag))
    local("git log -1 --oneline")
    if info.confirmation_required:
        confirmed = confirm("Tag this commit?")
    else:
        printer.warning("Tagging commit")
        confirmed = True
    if not confirmed:
        abort()
    tag_message = f"Release {info.name} {info.version}"
    local(("git", "tag", "-a", "-m", tag_message, info.tag_name))
    local(("git", "checkout", original_branch))
def find_change_log_section(change_log, version):
    """Locate the change log section header for ``version``.

    Scans ``change_log`` (a path object) for the first line matching
    ``## <version> - <date>``. That header must either name the release
    version or carry the literal date "unreleased"; anything else
    aborts. Returns the 0-based line number of the header. Aborts if
    no header is found at all.
    """
    # E.g.: ## 1.0.0 - unreleased
    header_pattern = re.compile(r"^## (?P<version>.+) - (?P<date>.+)$")
    with change_log.open() as fp:
        for line_number, line in enumerate(fp):
            match = header_pattern.search(line)
            if not match:
                continue
            found_version = match.group("version")
            found_date = match.group("date")
            if found_version == version:
                # Same version with a real date means it was already
                # released once before.
                if found_date != "unreleased":
                    printer.warning("Re-releasing", version)
            elif found_date == "unreleased":
                if found_version != version:
                    printer.warning("Replacing", found_version, "with", version)
            else:
                msg = (f"Expected version {version} or release date "
                       f'"unreleased"; got:\n\n {line}')
                abort(7, msg)
            return line_number
    abort(8, "Could not find section in change log")
def lint(
    show_errors: arg(help="Show errors") = True,
    disable_ignore: arg(no_inverse=True, help="Don't ignore any errors") = False,
    disable_noqa: arg(no_inverse=True, help="Ignore noqa directives") = False,
):
    """Run flake8 on the current directory and abort if lint is found."""
    printer.header("Checking for lint...")
    # None entries are dropped by local(); flags are only passed when
    # the corresponding option is enabled.
    result = local(
        (
            "flake8",
            ".",
            "--ignore=" if disable_ignore else None,
            "--disable-noqa" if disable_noqa else None,
        ),
        stdout="capture",
        raise_on_error=False,
    )
    lint_count = len(result.stdout_lines)
    if not lint_count:
        printer.success("No lint found")
        return
    ess = "" if lint_count == 1 else "s"
    colon = ":" if show_errors else ""
    message = f"{lint_count} piece{ess} of lint found{colon}"
    if show_errors:
        message = "\n".join([message, result.stdout.rstrip()])
    abort(1, message)
def bycycle(service: arg(choices=('lookup', 'route')), q):
    """Run a byCycle service.

    Args:
        service: Which service to run ("lookup" or "route").
        q: Query string; for the route service, must be "A to B".
    """
    module_name = 'bycycle.core.service.{service}'.format(service=service)
    service_factory = load_object(module_name, 'Service')
    if service == 'route':
        # BUG FIX: the third positional argument of re.split() is
        # maxsplit, not flags; passing re.I there silently limited the
        # number of splits instead of making the match case-insensitive.
        q = re.split(r'\s+to\s+', q, flags=re.I)
        if len(q) < 2:
            abort(1, 'Route must be specified as "A to B"')
    engine = get_engine()
    session_factory = get_session_factory(engine)
    session = session_factory()
    start_time = time.time()
    try:
        service = service_factory(session)
        response = service.query(q)
    except Exception:
        # Leave the session dirty-state cleanup to rollback, then let
        # the original error propagate.
        session.rollback()
        raise
    else:
        session.close()
    print(response)
    print('{:.2f} seconds'.format(time.time() - start_time))
def find_change_log():
    """Return the change log path in the CWD (CHANGELOG or CHANGELOG.md).

    Aborts with code 6 when neither candidate exists.
    """
    change_log_candidates = ["CHANGELOG", "CHANGELOG.md"]
    cwd = pathlib.Path.cwd()
    for name in change_log_candidates:
        candidate = cwd / name
        if candidate.is_file():
            return candidate
    abort(
        6,
        f"Could not find change log; tried {', '.join(change_log_candidates)}")
def make_app(self, app_factory, settings_file, settings):
    """Resolve the app factory and build the application from settings.

    The factory may be given directly, or fall back to the ``factory``
    setting loaded from the settings file; aborts when neither is
    available.
    """
    settings = self.make_settings(settings_file, settings)
    if app_factory is None:
        # Fall back to the factory declared in the settings file.
        app_factory = settings.get('factory')
    if app_factory is None:
        abort(1, (
            'An app factory must be specified via the factory setting '
            'or using the --factory option.'
            '\nDid you specify a settings file via --settings-file?'
            '\nIf so, does it contain an [app] section with a factory setting?'
        ))
    factory = load_object(app_factory, 'make_app')
    return factory(settings)
def rollup(env, live_reload: arg(type=bool) = None, watch=False, quiet=False):
    """Run rollup with NODE_ENV set to ``env``; abort on failure."""
    environ = {'NODE_ENV': env}
    if live_reload is not None:
        # Rollup config reads LIVE_RELOAD as "0"/"1".
        environ['LIVE_RELOAD'] = str(int(live_reload))
    argv = ('rollup', '--config', '--watch' if watch else None)
    result = local(
        argv,
        environ=environ,
        stdout='hide' if quiet else None,
        stderr='capture',
        raise_on_error=False,
    )
    if result.failed:
        abort(result.return_code, result.stderr)
def upload_dists(
    make: arg(help="Make dist first? [yes]") = True,
    version: arg(help="Version/tag to release [latest tag]") = None,
    quiet: arg(help="Make dist quietly? [no]") = False,
    username: arg(help="Twine username [$USER]") = None,
    password_command: arg(help="Command to retrieve twine password "
                               "(e.g. `password-manager show-password PyPI`) "
                               "[twine prompt]") = None,
):
    """Upload distributions in ./dist using ``twine``.

    Optionally builds the distributions first, confirms the file list
    with the user, resolves twine credentials, then uploads each dist
    individually (each with its own confirmation).
    """
    if make:
        printer.header("Making and uploading distributions")
        make_dist(quiet=quiet)
    else:
        printer.header("Uploading distributions")
    dists = os.listdir("dist")
    if not dists:
        abort(1, "No distributions found in dist directory")
    paths = [os.path.join("dist", file) for file in dists]
    printer.info("Found distributions:")
    for path in paths:
        printer.info(" -", path)
    if not confirm("Continue?"):
        abort()
    if not username:
        username = getpass.getuser()
    environ = {"TWINE_USERNAME": username}
    # BUG FIX: this section contained scrub residue
    # (`printer.warning("TWINE_USERNAME:"******...`) that was not valid
    # Python. Reconstructed to show the username and a masked password;
    # the password warning is guarded so `password` is never referenced
    # when no password command was given.
    printer.warning("TWINE_USERNAME:", username)
    if password_command:
        printer.info(f"Retrieving password via `{password_command}`...")
        result = local(password_command, stdout="capture")
        password = result.stdout.strip()
        environ["TWINE_PASSWORD"] = password
        # Never echo the password itself; show only its masked length.
        printer.warning("TWINE_PASSWORD:", "*" * len(password))
    for path in paths:
        if confirm(f"Upload dist?: {path}"):
            local(("twine", "upload", path), environ=environ)
        else:
            printer.warning("Skipped dist:", path)
def drop_db(env, database, superuser='******', superuser_password='', superuser_database='postgres', host='localhost', port=5432):
    """Drop ``database`` after confirmation; refuses to run in prod."""
    if env == 'prod':
        abort(1, 'Cannot drop prod database')
    # BUG FIX: the prompt previously referenced {user}, which is not a
    # local name (the parameter is ``superuser``), so format_map(locals())
    # raised KeyError on every call. Sibling drop_db commands in this
    # file use {superuser} here.
    prompt = 'Drop database {database} via {superuser}@{host}?'.format_map(locals())
    if not confirm(prompt, yes_values=['yes']):
        abort()
    engine = create_engine(superuser, superuser_password, host, port, superuser_database)
    execute(engine, ('DROP DATABASE', database))
def get_current_version(file, name, abort_on_not_found=True):
    """Extract the current version assigned to ``name`` in ``file``.

    Looks for a line of the form ``<name> = '<version>'`` (optionally
    with a ``.devN`` suffix, which is excluded from the result).

    E.g.: __version__ = '1.0.dev0'

    Returns ``(line_number, quote_char, version)`` with a 0-based line
    number. Aborts with code 4 when not found, unless
    ``abort_on_not_found`` is false (then returns None).
    """
    pattern = re.compile(
        fr"""^{name}"""
        r""" *= *"""
        r"""(?P<quote>['"])((?P<version>.+?)(?P<dev_marker>\.dev\d+)?)?\1 *$"""
    )
    with file.open() as fp:
        for line_number, line in enumerate(fp):
            match = pattern.search(line)
            if match is None:
                continue
            return line_number, match.group("quote"), match.group("version")
    if abort_on_not_found:
        abort(4, f"Could not find {name} in {file}")
def drop_db(config, user='******', password=None, host=None, port=None, database=None):
    """Drop the configured database after confirmation (never in prod)."""
    if config.env == 'prod':
        abort(1, 'Cannot drop prod database')
    if not database:
        # Fall back to the database named in the config.
        database = config.db['database']
    if not confirm(config, f'Drop database {database}?', yes_values=['yes']):
        abort()
    engine = create_engine(config, user, password, host, port, 'postgres')
    execute(engine, f'DROP DATABASE {database}')
def dev_server(default_args, host='localhost', port=5000, directory='public'):
    """Run a local dev server with scss and rollup watchers.

    Cleans the build directory, starts background sass and rollup
    watchers, waits for their first outputs, then serves ``directory``
    with an SPA-style fallback to index.html until interrupted.
    """
    printer.header('Running dev server')
    printer.hr('Cleaning', color='info')
    clean()
    printer.info('Running scss watcher in background')
    sass_args = ['sass', '--watch']
    for source in default_args['sass']['sources']:
        name = os.path.basename(source)
        root, ext = os.path.splitext(name)
        destination = os.path.join('public', 'build', f'{root}.css')
        sass_args.append(f'{source}:{destination}')
    local(sass_args, background=True, environ={
        'NODE_ENV': 'development',
    })
    # NOTE(review): waits only on the *last* source's output file
    # (``destination`` keeps the final loop value) — presumably the
    # watchers build all sources together; confirm.
    wait_for_file(destination)
    printer.hr('Running rollup watcher in background', color='info')
    local(['rollup', '--config', '--watch'], background=True, environ={
        'NODE_ENV': 'development',
        'LIVE_RELOAD': 'true',
    })
    # Block until the JS bundle exists so the server never serves a
    # half-built page on first load.
    wait_for_file('public/build/bundle.js')
    printer.hr(f'Serving {directory} directory at http://{host}:{port}/', color='info')

    class RequestHandler(SimpleHTTPRequestHandler):
        # Serve files from ``directory`` instead of the CWD.
        def __init__(self, *args, **kwargs):
            super().__init__(*args, directory=directory, **kwargs)

        # SPA fallback: unknown paths resolve to index.html.
        def translate_path(self, path):
            path = super().translate_path(path)
            if not os.path.exists(path):
                path = os.path.join(self.directory, 'index.html')
            return path

    server = ThreadingHTTPServer((host, port), RequestHandler)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        abort(message='Shut down dev server')
def drop_db(
    env,
    db,
    superuser='******',
    superuser_password='******',
    superuser_database='postgres',
):
    """Drop the database described by ``db`` after confirmation.

    Refuses to run against the production environment.
    """
    if env == 'production':
        abort(1, 'Cannot drop production database')
    host, port, database = db['host'], db['port'], db['database']
    if not confirm(f'Drop database {database} via {superuser}@{host}?',
                   yes_values=['yes']):
        abort()
    engine = create_engine(
        superuser, superuser_password, host, port, superuser_database)
    execute(engine, ('DROP DATABASE', database))
def get_next_version(current_version):
    """Guess the next version based on ``current_version``.

    - ``X.Y.Z``            -> ``X.Y.(Z+1)``  (next patch release)
    - ``X.YaZ``/``bZ``/``rcZ`` -> ``X.Ya(Z+1)`` (next pre-release)
    - ``X.Y`` (or ``X.Y`` plus anything else) -> ``X.(Y+1)``

    Aborts with code 5 when no version can be derived.
    """
    next_version_re = r"^(?P<major>\d+)\.(?P<minor>\d+)(?P<rest>.*)$"
    match = re.search(next_version_re, current_version)
    if match:
        major = int(match.group("major"))
        minor = int(match.group("minor"))
        rest = match.group("rest")
        patch_re = r"^\.(?P<patch>\d+)$"
        match = re.search(patch_re, rest)
        if match:
            # X.Y.Z: bump the patch number.
            # BUG FIX: previously the minor number was bumped while the
            # old patch number was kept (e.g. 1.2.3 -> "1.3.3"), which
            # is not a meaningful next version.
            patch = int(match.group("patch")) + 1
            return f"{major}.{minor}.{patch}"
        pre_re = r"^(?P<pre_marker>a|b|rc)(?P<pre_version>\d+)$"
        match = re.search(pre_re, rest)
        if match:
            # X.YaZ: bump the pre-release number, keep the marker.
            pre_marker = match.group("pre_marker")
            pre_version = int(match.group("pre_version")) + 1
            return f"{major}.{minor}{pre_marker}{pre_version}"
        # X.Y or starts with X.Y (but is not X.Y.Z or X.YaZ).
        minor += 1
        return f"{major}.{minor}"
    abort(
        5,
        f"Cannot automatically determine next version from {current_version}")
def release(config, version=None, date=None, tag_name=None, next_version=None,
            prepare=True, merge=True, create_tag=True, resume=True, yes=False):
    """Make a runcommands release: prepare, merge, tag, resume.

    Must be run from the develop branch. Guesses the release version
    from ``__version__`` in the package and the next version from the
    release version, updates the version file and CHANGELOG, merges
    develop into master, tags, and bumps develop to the next dev
    version. Each destructive step can be toggled off or auto-confirmed
    with ``yes``.
    """

    # Replace one line of a file in place (used for the version line in
    # __init__.py and the section header line in CHANGELOG).
    def update_line(file_name, line_number, content):
        with open(file_name) as fp:
            lines = fp.readlines()
        lines[line_number] = content
        with open(file_name, 'w') as fp:
            fp.writelines(lines)

    result = local(config, 'git rev-parse --abbrev-ref HEAD', hide='stdout')
    current_branch = result.stdout.strip()
    if current_branch != 'develop':
        abort(1, 'Must be on develop branch to make a release')

    init_module = 'runcommands/__init__.py'
    changelog = 'CHANGELOG'

    # E.g.: __version__ = '1.0.dev0'
    version_re = r"^__version__ = '(?P<version>.+)(?P<dev_marker>\.dev\d+)'$"

    # E.g.: ## 1.0.0 - 2017-04-01
    changelog_header_re = r'^## (?P<version>.+) - (?P<date>.+)$'

    # Find the current version (and its line number, for later updates)
    # in the package's __init__ module.
    with open(init_module) as fp:
        for init_line_number, line in enumerate(fp):
            if line.startswith('__version__'):
                match = re.search(version_re, line)
                if match:
                    current_version = match.group('version')
                    if not version:
                        version = current_version
                break
        else:
            abort(
                1,
                'Could not find __version__ in {init_module}'.format_map(
                    locals()))

    date = date or datetime.date.today().isoformat()
    tag_name = tag_name or version

    # Derive the next version from the release version unless one was
    # given explicitly: bump patch for X.Y.Z, pre-release number for
    # X.YaZ, otherwise the minor number.
    if next_version is None:
        next_version_re = r'^(?P<major>\d+)\.(?P<minor>\d+)(?P<rest>.*)$'
        match = re.search(next_version_re, version)
        if match:
            major = match.group('major')
            minor = match.group('minor')
            major = int(major)
            minor = int(minor)
            rest = match.group('rest')
            patch_re = r'^\.(?P<patch>\d+)$'
            match = re.search(patch_re, rest)
            if match:
                # X.Y.Z
                minor += 1
                patch = match.group('patch')
                next_version = '{major}.{minor}.{patch}'.format_map(locals())
            else:
                pre_re = r'^(?P<pre_marker>a|b|rc)(?P<pre_version>\d+)$'
                match = re.search(pre_re, rest)
                if match:
                    # X.YaZ
                    pre_marker = match.group('pre_marker')
                    pre_version = match.group('pre_version')
                    pre_version = int(pre_version)
                    pre_version += 1
                    next_version = '{major}.{minor}{pre_marker}{pre_version}'.format_map(
                        locals())
                else:
                    # X.Y or starts with X.Y (but is not X.Y.Z or X.YaZ)
                    minor += 1
                    next_version = '{major}.{minor}'.format_map(locals())
        if next_version is None:
            msg = 'Cannot automatically determine next version from {version}'.format_map(
                locals())
            abort(3, msg)

    next_version_dev = '{next_version}.dev0'.format_map(locals())

    # Find the first line that starts with '##'. Extract the version and
    # date from that line. The version must be the specified release
    # version OR the date must be the literal string 'unreleased'.
    with open(changelog) as fp:
        for changelog_line_number, line in enumerate(fp):
            if line.startswith('## '):
                match = re.search(changelog_header_re, line)
                if match:
                    found_version = match.group('version')
                    found_date = match.group('date')
                    if found_version == version:
                        if found_date != 'unreleased':
                            printer.warning('Re-releasing', version)
                    elif found_date == 'unreleased':
                        if found_version != version:
                            printer.warning('Replacing', found_version, 'with', version)
                    else:
                        msg = (
                            'Expected version {version} or release date "unreleased"; got:\n\n'
                            ' {line}').format_map(locals())
                        abort(4, msg)
                break
        else:
            abort(5, 'Could not find section in change log')

    printer.info('Version:', version)
    printer.info('Tag name:', tag_name)
    printer.info('Release date:', date)
    printer.info('Next version:', next_version)
    msg = 'Continue with release?: {version} - {date}'.format_map(locals())
    yes or confirm(config, msg, abort_on_unconfirmed=True)

    printer.header('Testing...')
    tox(config)

    # Prepare
    if prepare:
        printer.header('Preparing release', version, 'on', date)
        updated_init_line = "__version__ = '{version}'\n".format_map(locals())
        updated_changelog_line = '## {version} - {date}\n'.format_map(locals())
        update_line(init_module, init_line_number, updated_init_line)
        update_line(changelog, changelog_line_number, updated_changelog_line)
        local(config, ('git diff', init_module, changelog))
        yes or confirm(
            config, 'Commit these changes?', abort_on_unconfirmed=True)
        msg = prompt('Commit message',
                     default='Prepare release {version}'.format_map(locals()))
        msg = '-m "{msg}"'.format_map(locals())
        local(config, ('git commit', init_module, changelog, msg))

    # Merge and tag
    if merge:
        printer.header('Merging develop into master for release', version)
        local(config, 'git log --oneline --reverse master..')
        msg = 'Merge these changes from develop into master for release {version}?'
        msg = msg.format_map(locals())
        yes or confirm(config, msg, abort_on_unconfirmed=True)
        local(config, 'git checkout master')
        msg = '"Merge branch \'develop\' for release {version}"'.format_map(
            locals())
        local(config, ('git merge --no-ff develop -m', msg))
        if create_tag:
            printer.header('Tagging release', version)
            msg = '"Release {version}"'.format_map(locals())
            # The annotated tag lands on the merge commit on master.
            local(config, ('git tag -a -m', msg, version))
        local(config, 'git checkout develop')

    # Resume
    if resume:
        printer.header('Resuming development at', next_version)
        updated_init_line = "__version__ = '{next_version_dev}'\n".format_map(
            locals())
        new_changelog_lines = [
            '## {next_version} - unreleased\n\n'.format_map(locals()),
            'In progress...\n\n',
        ]
        update_line(init_module, init_line_number, updated_init_line)
        # Insert a fresh in-progress section above the section that was
        # just released.
        with open(changelog) as fp:
            lines = fp.readlines()
        lines = lines[:changelog_line_number] + new_changelog_lines + lines[
            changelog_line_number:]
        with open(changelog, 'w') as fp:
            fp.writelines(lines)
        local(config, ('git diff', init_module, changelog))
        yes or confirm(
            config, 'Commit these changes?', abort_on_unconfirmed=True)
        msg = prompt('Commit message',
                     default='Resume development at {next_version}'.format_map(
                         locals()))
        msg = '-m "{msg}"'.format_map(locals())
        local(config, ('git commit', init_module, changelog, msg))
def build_js(config, sources=(), echo=False, hide=None):
    """Build the JS bundle by running ``node build.js`` in the static dir."""
    # TODO: Pass sources to Node script?
    if sources:
        abort(1, 'The --sources option is currently ignored by build_js')
    static_dir = abs_path(args_to_str('{package}:static', format_kwargs=config))
    local(config, ('node', 'build.js'), cd=static_dir, echo=echo, hide=hide)
def test(config, tests=(), fail_fast=False, verbosity=1, with_coverage=False,
         with_lint=False):
    """Run the arcutils test suite under an ad hoc Django configuration.

    Configures minimal Django settings in-process (sqlite in-memory DB,
    one test URL, the arcutils app), then runs the discovered tests.
    Coverage and lint only run for full-suite runs.
    """
    from coverage import Coverage
    from django import setup
    from django.conf import settings
    from django.conf.urls import url
    from django.http import HttpResponse
    from django.test.runner import DiscoverRunner

    # Coverage/lint results only make sense when the whole suite runs.
    with_coverage = with_coverage and not tests
    with_lint = with_lint and not tests

    settings.configure(
        DEBUG=True,
        ALLOWED_HOSTS=['*'],
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': ':memory:',
            }
        },
        ROOT_URLCONF=(
            url(r'^test$', lambda request: HttpResponse('test'), name='test'),
        ),
        INSTALLED_APPS=(
            'django.contrib.auth',
            'django.contrib.contenttypes',
            'django.contrib.sessions',
            'django.contrib.admin',
            'arcutils',
        ),
        MIDDLEWARE_CLASSES=[],
        LDAP={
            'default': {
                'host': 'ldap://ldap-login.oit.pdx.edu',
                'username': '',
                'password': '',
                'search_base': 'ou=people,dc=pdx,dc=edu',
            }
        },
        TEMPLATES=[{
            'BACKEND': 'django.template.backends.django.DjangoTemplates',
            'APP_DIRS': True,
            'OPTIONS': {
                'context_processors': [
                    'django.contrib.auth.context_processors.auth',
                ]
            }
        }],
    )
    setup()
    runner = DiscoverRunner(failfast=fail_fast, verbosity=verbosity)
    if with_coverage:
        coverage = Coverage(source=['arcutils'])
        coverage.start()
    if tests:
        num_errors = runner.run_tests(tests)
    else:
        num_errors = runner.run_tests(['arcutils'])
    if num_errors:
        # Propagate the failure count as the exit code.
        abort(code=num_errors, message='Test failure(s) encountered; aborting')
    if with_coverage:
        coverage.stop()
        coverage.report()
    if with_lint:
        lint(config)
def deploy(env, host, version=None, build_=True, clean_=True, verbose=False,
           push=True, overwrite=False, chown=True, chmod=True, link=True,
           dry_run=False):
    """Deploy the web UI build to a remote host.

    Builds locally (optional), pushes public/ into a per-version remote
    build directory, sets ownership/permissions, and links the "current"
    symlink at the new build. ``dry_run`` prints the plan and performs
    only the rsync dry run.
    """
    if env == 'development':
        abort(1, 'Can\'t deploy to development environment')
    version = version or git_version()
    root_dir = f'/sites/{host}/webui'
    build_dir = f'{root_dir}/builds/{version}'
    link_path = f'{root_dir}/current'
    # Guards the mutating remote steps below.
    real_run = not dry_run
    printer.hr(
        f'{"[DRY RUN] " if dry_run else ""}Deploying version {version} to {env}',
        color='header')
    printer.header('Host:', host)
    printer.header('Remote root directory:', root_dir)
    printer.header('Remote build directory:', build_dir)
    printer.header('Remote link to current build:', link_path)
    printer.header('Steps:')
    printer.header(f' - {"Cleaning" if clean_ else "Not cleaning"}')
    printer.header(f' - {"Building" if build_ else "Not building"}')
    printer.header(f' - {f"Pushing" if push else "Not pushing"}')
    printer.header(f' - {f"Setting owner" if chown else "Not setting owner"}')
    printer.header(
        f' - {f"Setting permissions" if chmod else "Not setting permissions"}'
    )
    if overwrite:
        printer.warning(f' - Overwriting {build_dir}')
    printer.header(f' - {"Linking" if link else "Not linking"}')
    confirm(f'Continue with deployment of version {version} to {env}?',
            abort_on_unconfirmed=True)
    if build_:
        build(env, clean_=clean_, verbose=verbose)
    if push:
        remote(f'test -d {build_dir} || mkdir -p {build_dir}')
        printer.info(f'Pushing public/ to {build_dir}...')
        # delete=overwrite: when overwriting, remove remote files that
        # no longer exist locally.
        sync('public/', f'{build_dir}/', host, delete=overwrite,
             dry_run=dry_run, echo=verbose)
    if chown:
        owner = 'bycycle:www-data'
        printer.info(f'Setting ownership of {build_dir} to {owner}...')
        if real_run:
            remote(('chown', '-R', owner, build_dir), sudo=True)
    if chmod:
        mode = 'u=rwX,g=rwX,o='
        printer.info(f'Setting permissions on {build_dir} to {mode}...')
        if real_run:
            remote(('chmod', '-R', mode, build_dir), sudo=True)
    if link:
        printer.info(f'Linking {link_path} to {build_dir}')
        if real_run:
            remote(('ln', '-sfn', build_dir, link_path))
def deploy(config, version=None, overwrite=False, overwrite_venv=False,
           install=True, push=True, link=True, reload=True):
    """Deploy a versioned build of the app to the configured host.

    Stages config files and sdists into a local build dir, tars it,
    rsyncs it to the remote, installs into a per-version virtualenv,
    links it as current, and reloads uWSGI.
    """
    # Setup ----------------------------------------------------------
    if version:
        config = config.copy(version=version)
    elif config.get('version'):
        printer.info('Using default version:', config.version)
    else:
        abort(1, 'Version must be specified via config or passed as an option')

    # Local ----------------------------------------------------------
    build_dir = config.build.dir
    if overwrite and os.path.exists(build_dir):
        shutil.rmtree(build_dir)
    os.makedirs(build_dir, exist_ok=True)

    # Add config files
    copy_file(config, 'application.wsgi', build_dir, template=True)
    copy_file(config, 'base.ini', build_dir)
    copy_file(config, '{env}.ini', build_dir, template=True)
    copy_file(config, 'commands.py', build_dir)
    copy_file(config, 'commands.cfg', build_dir)

    # Create source distributions
    dist_dir = os.path.abspath(config.build.dist_dir)
    sdist_command = ('python setup.py sdist --dist-dir', dist_dir)
    local(config, sdist_command, hide='stdout')
    for path in config.deploy.sdists:
        local(config, sdist_command, hide='stdout', cd=path)

    tarball_name = '{config.version}.tar.gz'.format(config=config)
    tarball_path = os.path.join(build_dir, tarball_name)
    with tarfile.open(tarball_path, 'w:gz') as tarball:
        tarball.add(build_dir, config.version)

    if push:
        # rsync as the deploy user so remote ownership is correct.
        local(config, (
            'rsync -rltvz',
            '--rsync-path "sudo -u {deploy.user} rsync"',
            tarball_path,
            '{remote.host}:{deploy.root}',
        ))

    # Remote ----------------------------------------------------------
    deploy_dir_exists = remote(config, 'test -d {deploy.dir}',
                               abort_on_failure=False)
    if deploy_dir_exists and overwrite:
        remote(config, 'rm -r {deploy.dir}')

    remote(config, ('tar -xvzf', tarball_name), cd='{deploy.root}')

    # Create virtualenv for this version
    venv_exists = remote(config, 'test -d {deploy.venv}',
                         abort_on_failure=False)
    if venv_exists and overwrite_venv:
        remote(config, 'rm -r {deploy.venv}')
        venv_exists = False
    if not venv_exists:
        remote(config, (
            'python{python.version} -m venv {deploy.venv} &&',
            '{deploy.pip.exe} install',
            '--cache-dir {deploy.pip.cache_dir}',
            '--upgrade setuptools pip wheel',
        ))

    # Build source
    if install:
        remote(config, (
            '{deploy.pip.exe}',
            'install',
            '--find-links {deploy.pip.find_links}',
            '--cache-dir {deploy.pip.cache_dir}',
            '--disable-pip-version-check',
            '{package}',
        ), cd='{deploy.root}', timeout=120)

    # Make this version the current version
    if link:
        remote(config, 'ln -sfn {deploy.dir} {deploy.link}')

    # Set permissions
    remote(config, 'chmod -R ug=rwX,o= {deploy.root}')

    if reload:
        reload_uwsgi(config)
def make_release(
    # Steps
    test: arg(
        short_option="-e",
        help="Run tests first",
    ) = True,
    prepare: arg(
        short_option="-p",
        help="Run release preparation tasks",
    ) = True,
    merge: arg(
        short_option="-m",
        help="Run merge tasks",
    ) = True,
    tag: arg(
        short_option="-t",
        help="Create release tag",
    ) = True,
    resume: arg(
        short_option="-r",
        help="Run resume development tasks",
    ) = True,
    test_command: arg(
        short_option="-c",
        help="Test command",
    ) = None,
    # Step config
    name: arg(
        short_option="-n",
        help="Release/package name [base name of CWD]",
    ) = None,
    version: arg(
        short_option="-v",
        help="Version to release",
    ) = None,
    version_file: arg(
        short_option="-V",
        help="File __version__ is in [search typical files]",
    ) = None,
    date: arg(
        short_option="-d",
        help="Release date [today]",
    ) = None,
    dev_branch: arg(
        short_option="-b",
        help="Branch to merge from [current branch]",
    ) = None,
    target_branch: arg(
        short_option="-B",
        help="Branch to merge into [prod]",
    ) = "prod",
    tag_name: arg(
        short_option="-a",
        help=("Release tag name; {name} and {version} in the tag name "
              "will be substituted [version]"),
    ) = None,
    next_version: arg(
        short_option="-w",
        help="Anticipated version of next release",
    ) = None,
    # Other
    yes: arg(
        short_option="-y",
        no_inverse=True,
        help="Run without being prompted for any confirmations",
    ) = False,
    show_version: arg(
        short_option="-s",
        long_option="--show-version",
        no_inverse=True,
        help="Show make-release version and exit",
    ) = False,
):
    """Make a release.

    Tries to guess the release version based on the current version
    and the next version based on the release version.

    Steps:

    - Prepare release:
      - Update ``version`` in ``pyproject.toml`` (if present)
      - Update ``__version__`` in version file (if present; typically
        ``package/__init__.py`` or ``src/package/__init__.py``)
      - Update next version header in change log
      - Commit version file and change log with prepare message
    - Merge to target branch (``prod`` by default):
      - Merge current branch into target branch with merge message
    - Create tag:
      - Add annotated tag for latest version; when merging, the tag
        will point at the merge commit on the target branch; when not
        merging, the tag will point at the prepare release commit on
        the current branch
    - Resume development:
      - Update version in ``pyproject.toml`` to next version (if
        present)
      - Update version in version file to next version (if present)
      - Add in-progress section for next version to change log
      - Commit version file and change log with resume message

    Caveats:

    - The next version will have the dev marker ".dev0" appended to it
    - The change log must be in Markdown format; release section
      headers must be second-level (i.e., start with ##)
    - The change log must be named CHANGELOG or CHANGELOG.md
    - The first release section header in the change log will be
      updated, so there always needs to be an in-progress section for
      the next version
    - Building distributions and uploading to PyPI isn't handled;
      after creating a release, build distributions using
      ``python setup.py sdist`` or ``poetry build`` (for example) and
      then upload them with ``twine upload``
    """
    if show_version:
        from . import __version__
        print(f"make-release version {__version__}")
        return

    cwd = pathlib.Path.cwd()
    name = name or cwd.name

    printer.hr("Releasing", name)
    print_step("Testing?", test)
    print_step("Preparing?", prepare)
    print_step("Merging?", merge)
    print_step("Tagging?", tag)
    print_step("Resuming development?", resume)

    if merge:
        if dev_branch is None:
            dev_branch = get_current_branch()
        if dev_branch == target_branch:
            abort(1, f"Dev branch and target branch are the same: {dev_branch}")

    # Read the current version from pyproject.toml, when present.
    pyproject_file = pathlib.Path("pyproject.toml")
    if pyproject_file.is_file():
        pyproject_version_info = get_current_version(pyproject_file, "version")
        (
            pyproject_version_line_number,
            pyproject_version_quote,
            pyproject_current_version,
        ) = pyproject_version_info
    else:
        pyproject_file = None
        pyproject_version_line_number = None
        pyproject_version_quote = None
        pyproject_current_version = None

    # Read the current version from the version file (given explicitly
    # or discovered), falling back to the pyproject.toml version.
    if version_file:
        version_file = pathlib.Path(version_file)
        # BUG FIX: get_current_version() requires the name to look for;
        # it was previously called without one, which raised TypeError.
        version_info = get_current_version(version_file, "__version__")
        version_line_number, version_quote, current_version = version_info
    else:
        version_info = find_version_file()
        if version_info is not None:
            (
                version_file,
                version_line_number,
                version_quote,
                current_version,
            ) = version_info
        else:
            version_file = None
            version_line_number = None
            version_quote = None
            current_version = pyproject_current_version

    if (current_version and pyproject_current_version
            and current_version != pyproject_current_version):
        abort(
            2,
            f"Version in pyproject.toml and "
            f"{version_file.relative_to(cwd)} don't match",
        )

    if not version:
        if current_version:
            version = current_version
        else:
            message = ("Current version not set in version file, so release "
                       "version needs to be passed explicitly")
            abort(3, message)

    if tag_name:
        tag_name = tag_name.format(name=name, version=version)
    else:
        tag_name = version

    date = date or datetime.date.today().isoformat()

    if not next_version:
        next_version = get_next_version(version)

    change_log = find_change_log()
    change_log_line_number = find_change_log_section(change_log, version)

    info = ReleaseInfo(
        name,
        dev_branch,
        target_branch,
        pyproject_file,
        pyproject_version_line_number,
        pyproject_version_quote,
        version_file,
        version_line_number,
        version_quote,
        version,
        tag_name,
        date,
        next_version,
        change_log,
        change_log_line_number,
        not yes,
    )

    print_info("Version:", info.version)
    print_info("Release date:", info.date)
    if merge:
        print_info("Dev branch:", dev_branch)
        print_info("Target branch:", target_branch)
    if tag:
        print_info("Tag name:", tag_name)
    print_info("Next version:", info.next_version)

    if info.confirmation_required:
        msg = f"Continue with release?: {info.version} - {info.date}"
        confirm(msg, abort_on_unconfirmed=True)
    else:
        # BUG FIX: this warning was missing the f prefix, so the
        # placeholders were printed literally.
        printer.warning(
            f"Continuing with release: {info.version} - {info.date}")

    if test:
        print_step_header("Testing")
        if test_command is None:
            # Prefer a tests/ directory; otherwise discover from CWD.
            if (cwd / "tests").is_dir():
                test_command = "python -m unittest discover tests"
            else:
                test_command = "python -m unittest discover ."
        local(test_command, echo=True)
    else:
        printer.warning("Skipping tests")

    if prepare:
        prepare_release(info)
    if merge:
        merge_to_target_branch(info)
    if tag:
        create_release_tag(info, merge)
    if resume:
        resume_development(info)