def build_docs(config, overwrite=False):
    """Build the Sphinx docs locally.

    Args:
        config: Run config providing ``docs.dir`` and ``docs.build_dir``.
        overwrite: When set, pass ``-E`` so sphinx-build discards the saved
            environment and rebuilds from scratch.

    """
    local(config, (
        'sphinx-build',
        # Use None (not '') for the omitted arg so no empty positional
        # argument reaches sphinx-build; matches the conditional-arg style
        # used by the other commands in this file.
        '-E' if overwrite else None,
        config.docs.dir,
        config.docs.build_dir,
    ))
def upload_docs(config):
    """Rsync the built docs to the remote docs directory, then fix ownership
    and permissions so the web server can read them.
    """
    source = config.docs.build_dir
    # rsync treats a trailing slash as "contents of directory", so make
    # sure both ends have one.
    if not source.endswith('/'):
        source += '/'
    destination = posixpath.join(config.docs.upload_path, config.package)
    if not destination.endswith('/'):
        destination += '/'
    url = ':'.join((config.domain_name, destination))
    # format_map(locals()) picks up `source` and `url` defined above.
    print('Uploading {source} to {url}...'.format_map(locals()))
    local(config, (
        'rsync',
        # Run rsync on the remote side as the tangled user for write access.
        '--rsync-path "sudo -u tangled rsync"',
        '-rltvz --delete',
        source,
        url,
    ))
    # Group-readable for www-data; no access for others.
    remote(config, (
        'chgrp -R www-data', config.docs.upload_path,
        '&& chmod -R u=rwX,g=rX,o=', config.docs.upload_path,
    ), host=config.domain_name, run_as=None, sudo=True)
def create_release_tag(info, merge):
    """Create an annotated release tag, then return to the original branch.

    Tags the target branch when ``merge`` is set, otherwise the dev branch.
    Aborts without tagging if the user declines confirmation.
    """
    print_step_header(f"Tagging {info.name} release", info.version)
    original_branch = get_current_branch()
    branch_to_tag = info.target_branch if merge else info.dev_branch
    local(("git", "checkout", branch_to_tag))
    # Show the commit that is about to be tagged.
    local("git log -1 --oneline")
    if info.confirmation_required:
        confirmed = confirm("Tag this commit?")
    else:
        printer.warning("Tagging commit")
        confirmed = True
    if not confirmed:
        abort()
    tag_message = f"Release {info.name} {info.version}"
    local(("git", "tag", "-a", "-m", tag_message, info.tag_name))
    local(("git", "checkout", original_branch))
def build_docs(config, source='docs', destination='docs/_build', type_='html'):
    """Build Sphinx docs from ``source`` into ``destination`` using the
    builder named by ``type_`` (e.g. 'html').
    """
    sphinx_args = ('sphinx-build', '-b', type_, source, destination)
    local(config, sphinx_args)
def build_docs(source="docs", destination="docs/_build", builder="html", clean=False):
    """Build Sphinx docs.

    Args:
        source: Docs source directory.
        destination: Build output directory.
        builder: Sphinx builder name (e.g. "html").
        clean: Remove ``destination`` before building.

    """
    if clean:
        printer.info(f"Removing {destination}...")
        # ignore_errors avoids a FileNotFoundError when the build directory
        # doesn't exist yet (e.g. the first build after a fresh checkout).
        shutil.rmtree(destination, ignore_errors=True)
    local(("sphinx-build", "-b", builder, source, destination))
def push_apache_config(config, enable=True):
    """Rsync the local Apache config tree to the remote host and, when
    ``enable`` is set, enable the site with a2ensite.

    NOTE(review): the '{remote.host}' and '{domain_name}' placeholders are
    presumably interpolated from ``config`` by local()/remote() — confirm.
    """
    local(config, (
        'rsync -rltvz',
        # Run rsync as root on the remote side to write under /etc.
        '--rsync-path "sudo rsync"',
        'etc/apache2/',
        '{remote.host}:/etc/apache2',
    ))
    if enable:
        remote(config, 'a2ensite {domain_name}')
def install(config, upgrade=False):
    """pip-install requirements, then add the virtualenv's site-packages to
    ``sys.path`` so freshly installed packages are importable.

    Args:
        config: Run config; must provide ``python.version``.
        upgrade: Pass ``--upgrade`` to pip.

    """
    local(config, (
        'pip install',
        # Use None (not '') so no empty argument is emitted when not
        # upgrading; consistent with conditional args elsewhere in the file.
        '--upgrade' if upgrade else None,
        '-r requirements.txt',
    ))
    site_packages = '.env/lib/python{python.version}/site-packages'.format_map(
        config)
    site.addsitedir(site_packages)
def reload_graph(log_to=None):
    """Ask a running Dijkstar server to reload its graph.

    XXX: Only works with `dijkstar serve --workers=1`; when workers is
    greater than 1, the Dijkstar server process must be restarted instead.
    """
    local('curl -X POST "http://localhost:8000/reload-graph"')
    print()
    if log_to:
        log_to_file(log_to, 'Reloaded graph')
def install(where='.venv', upgrade=False):
    """Install requirements.txt into the virtualenv at ``where``, optionally
    eagerly upgrading setuptools and pip first.
    """
    pip = '{where}/bin/pip'.format(where=where)
    if upgrade:
        upgrade_args = ('--upgrade', '--upgrade-strategy', 'eager',
                        'setuptools', 'pip')
    else:
        # None entries are skipped when the command is assembled.
        upgrade_args = None
    local((pip, 'install', upgrade_args, '-r', 'requirements.txt'))
def install(config, where='.env', python='python3', upgrade=False, overwrite=False):
    """Create (or overwrite) the virtualenv, then pip-install this package
    in editable mode with its dev extras.

    Args:
        config: Run config passed through to ``virtualenv`` and ``local``.
        where: Virtualenv directory.
        python: Python interpreter for the virtualenv.
        upgrade: Pass ``--upgrade`` to pip.
        overwrite: Recreate the virtualenv from scratch.

    """
    virtualenv(config, where=where, python=python, overwrite=overwrite)
    pip = '{where}/bin/pip'.format(where=where)
    local(config, (
        pip, 'install',
        # Use None (not '') so no empty argument is passed when not
        # upgrading; consistent with conditional args elsewhere in the file.
        '--upgrade' if upgrade else None,
        '-e .[dev,paramiko,tox]',
    ))
def backup_db(config):
    """Copy the remote SQLite database to a timestamped local file."""
    # Timestamp keeps successive backups from overwriting each other.
    date = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
    path = '../tangled.website-{date}.db'.format(date=date)
    local(config, (
        'rsync',
        # Run rsync on the remote side as the deploy user so the DB file
        # is readable.
        '--rsync-path "sudo -u {deploy.user} rsync"',
        '{remote.host}:{deploy.root}/site.db',
        path,
    ))
    printer.success('Saved copy of', config.env, 'database to', path)
def push_uwsgi_config(config, enable=True):
    """Push uWSGI app config.

    Uploads the config file to the same absolute path on the remote host
    and, when ``enable`` is set, symlinks it into place to activate it.
    """
    file = config.deploy.uwsgi.config_file
    link = config.deploy.uwsgi.config_link
    local(config, (
        'rsync -rltvz',
        '--rsync-path "sudo rsync"',
        # lstrip('/') turns the absolute remote path into the matching
        # path relative to the local working directory.
        file.lstrip('/'),
        ':'.join((config.remote.host, file)),
    ))
    if enable:
        # -f replaces any existing symlink.
        remote(config, ('ln -sf', file, link), run_as=None, sudo=True)
def tox(
    envs: "Pass -e option to tox with the specified environments" = (),
    recreate: "Pass --recreate flag to tox" = False,
    clean: "Remove tox directory first" = False,
):
    """Run tox, optionally limited to specific envs and/or from scratch."""
    if clean:
        local("rm -rf .tox", echo=True)
    tox_args = ["tox"]
    if envs:
        tox_args.append(("-e", ",".join(envs)))
    if recreate:
        tox_args.append("--recreate")
    local(tox_args)
def install(upgrade=False, skip_python=False):
    """Install (or upgrade) Python and npm dependencies.

    Args:
        upgrade: Update locked dependencies instead of installing as-is.
        skip_python: Skip the pip/poetry steps and only run npm.

    """
    if not skip_python:
        local(
            '.venv/bin/pip install --upgrade --upgrade-strategy eager pip setuptools'
        )
        if upgrade:
            local('poetry update')
    if upgrade:
        # Presumably `npm outdated` (npm accepts the `out` abbreviation);
        # it exits nonzero when outdated packages exist, so don't treat
        # that as an error. TODO confirm.
        local('npm out', raise_on_error=False)
        local('npm upgrade')
    else:
        local('npm install')
def sass(sources: arg(container=tuple), watch=False, quiet=False):
    """Compile each SCSS source into public/build/<name>.css.

    With ``watch``, keep sass running and recompiling on change. Unless
    ``quiet``, report each compiled source/destination pair.
    """
    pairs = []
    for src in sources:
        base, _ext = os.path.splitext(os.path.basename(src))
        dest = os.path.join('public', 'build', f'{base}.css')
        pairs.append((src, dest))
    compile_specs = [f'{src}:{dest}' for src, dest in pairs]
    local(('sass', '--watch' if watch else None, *compile_specs))
    if not quiet:
        for src, dest in pairs:
            print(f'Compiled {src} to {dest}')
def make_dist(
    version: arg(help="Tag/version to release [latest tag]") = None,
    formats=("sdist", "wheel"),
    quiet=False,
):
    """Make a distribution for upload to PyPI.

    Switches to the specified tag or branch, makes the distribution, then
    switches back to the original branch.

    Intended to be run from the develop branch. If a tag is already
    checked out, the develop branch will be checked out first and then
    switched back to after making the distribution.

    """
    current_branch = get_current_branch()
    # get_current_branch() returns "HEAD" in detached-HEAD state (a tag is
    # checked out); in that case switch back to develop afterwards.
    original_branch = "develop" if current_branch == "HEAD" else current_branch
    version = version or get_latest_tag()
    stdout = "hide" if quiet else None
    printer.header(f"Making dist for {version}")
    if version != current_branch:
        if current_branch == "HEAD":
            printer.warning("Not on a branch; checking out develop first")
        else:
            printer.info("Currently on branch", current_branch)
        printer.info("Checking out", version)
        # XXX: Hide warning about detached HEAD state
        result = local(("git", "checkout", version), stdout=stdout,
                       stderr="capture")
        if result.failed:
            print(result.stderr, file=sys.stderr)
    printer.info("Removing dist directory")
    rmdir("dist", verbose=not quiet)
    printer.info("Making dists for", version)
    for format_ in formats:
        local(("poetry", "build", "--format", format_), stdout=stdout)
    if version != current_branch:
        printer.info("Switching back to", original_branch)
        # XXX: Hide message about previous HEAD state and branch info
        result = local(("git", "checkout", original_branch), stdout="hide",
                       stderr="capture")
        if result.failed:
            print(result.stderr, file=sys.stderr)
def virtualenv(config, overwrite=False):
    """Create the .env virtualenv, optionally replacing an existing one.

    An existing virtualenv is left alone unless ``overwrite`` is set.
    """
    where = '.env'
    create = True
    if os.path.exists(where):
        if overwrite:
            print('Overwriting', where)
            shutil.rmtree(where)
        else:
            print(where, 'exists')
            create = False
    if create:
        local(config, ('virtualenv -p python{python.version}', where))
def upload_dists(
    make: arg(help="Make dist first? [yes]") = True,
    version: arg(help="Version/tag to release [latest tag]") = None,
    quiet: arg(help="Make dist quietly? [no]") = False,
    username: arg(help="Twine username [$USER]") = None,
    password_command: arg(help="Command to retrieve twine password "
                               "(e.g. `password-manager show-password PyPI`) "
                               "[twine prompt]") = None,
):
    """Upload distributions in ./dist using ``twine``.

    Optionally builds the dists first, confirms the file list with the
    user, and passes credentials to twine via the TWINE_USERNAME /
    TWINE_PASSWORD environment variables.
    """
    if make:
        printer.header("Making and uploading distributions")
        make_dist(quiet=quiet)
    else:
        printer.header("Uploading distributions")
    dists = os.listdir("dist")
    if not dists:
        abort(1, "No distributions found in dist directory")
    paths = [os.path.join("dist", file) for file in dists]
    printer.info("Found distributions:")
    for path in paths:
        printer.info(" -", path)
    if not confirm("Continue?"):
        abort()
    if not username:
        username = getpass.getuser()
    environ = {"TWINE_USERNAME": username}
    if password_command:
        printer.info(f"Retrieving password via `{password_command}`...")
        result = local(password_command, stdout="capture")
        password = result.stdout.strip()
        environ["TWINE_PASSWORD"] = password
        # NOTE(review): the original statements here were redacted
        # ("TWINE_USERNAME:"******...). Reconstructed as warnings echoing
        # the username and a masked password -- confirm against history.
        printer.warning("TWINE_USERNAME:", username)
        printer.warning("TWINE_PASSWORD:", "*" * len(password))
    for path in paths:
        if confirm(f"Upload dist?: {path}"):
            local(("twine", "upload", path), environ=environ)
        else:
            printer.warning("Skipped dist:", path)
def dbshell(user, password, database, host='localhost', port=5432):
    """Open a pgcli shell against the given database, supplying the
    password (if any) via the PGPASSWORD environment variable.
    """
    environ = {'PGPASSWORD': password} if password else {}
    pgcli_args = (
        'pgcli',
        '--user', user,
        '--host', host,
        '--port', port,
        '--dbname', database,
    )
    local(pgcli_args, environ=environ)
def lint(
    show_errors: arg(help="Show errors") = True,
    disable_ignore: arg(no_inverse=True, help="Don't ignore any errors") = False,
    disable_noqa: arg(no_inverse=True, help="Ignore noqa directives") = False,
):
    """Run flake8 over the project and abort with a summary (and optionally
    the errors themselves) when any lint is found.
    """
    printer.header("Checking for lint...")
    result = local(
        (
            "flake8",
            ".",
            # `--ignore=` with an empty value clears flake8's ignore list.
            "--ignore=" if disable_ignore else None,
            "--disable-noqa" if disable_noqa else None,
        ),
        stdout="capture",
        raise_on_error=False,
    )
    count = len(result.stdout_lines)
    if not count:
        printer.success("No lint found")
        return
    ess = "" if count == 1 else "s"
    colon = ":" if show_errors else ""
    lines = [f"{count} piece{ess} of lint found{colon}"]
    if show_errors:
        lines.append(result.stdout.rstrip())
    abort(1, "\n".join(lines))
def provision(env='production', tags=(), skip_tags=(), echo=True):
    """Run the Ansible provisioning play for ``env``.

    Defaults to the 'provision' tag when no tags are given; a bare string
    tag is normalized to a one-element tuple.
    """
    tags = tags or 'provision'
    if isinstance(tags, str):
        tags = (tags,)
    ansible_args = get_ansible_args(env, tags=tags, skip_tags=skip_tags)
    printer.hr(f'Provisioning {env}...', color='header')
    return local(ansible_args, echo=echo)
def dev_server(default_args, host='localhost', port=5000, directory='public'):
    """Run the full dev stack: sass watcher, rollup watcher, and a static
    HTTP server for ``directory`` with SPA-style index.html fallback.
    """
    printer.header('Running dev server')
    printer.hr('Cleaning', color='info')
    clean()
    printer.info('Running scss watcher in background')
    sass_args = ['sass', '--watch']
    for source in default_args['sass']['sources']:
        name = os.path.basename(source)
        root, ext = os.path.splitext(name)
        destination = os.path.join('public', 'build', f'{root}.css')
        sass_args.append(f'{source}:{destination}')
    local(sass_args, background=True, environ={
        'NODE_ENV': 'development',
    })
    # NOTE(review): `destination` is the loop's last value, so this only
    # waits for the final CSS file to appear -- presumably good enough
    # since sass compiles all sources together; confirm.
    wait_for_file(destination)
    printer.hr('Running rollup watcher in background', color='info')
    local(['rollup', '--config', '--watch'], background=True, environ={
        'NODE_ENV': 'development',
        'LIVE_RELOAD': 'true',
    })
    wait_for_file('public/build/bundle.js')
    printer.hr(f'Serving {directory} directory at http://{host}:{port}/',
               color='info')

    class RequestHandler(SimpleHTTPRequestHandler):

        def __init__(self, *args, **kwargs):
            super().__init__(*args, directory=directory, **kwargs)

        def translate_path(self, path):
            # Fall back to index.html for paths that don't exist on disk
            # (client-side routing).
            path = super().translate_path(path)
            if not os.path.exists(path):
                path = os.path.join(self.directory, 'index.html')
            return path

    server = ThreadingHTTPServer((host, port), RequestHandler)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        abort(message='Shut down dev server')
def test_local_ls(self):
    """local() should run `ls -1` in this test's directory, capture the
    listing, and report success.
    """
    config = Config()
    result = local(config, 'ls -1', cd=os.path.dirname(__file__), hide='all')
    self.assertIn('__init__.py', result.stdout_lines)
    self.assertIn('commands.cfg', result.stdout_lines)
    # A successful result object is truthy.
    self.assertTrue(result)
def lint(config):
    """Run flake8 and report either a lint count or success."""
    result = local(config, 'flake8 .', abort_on_failure=False)
    # One line of flake8 output per piece of lint.
    pieces_of_lint = len(result.stdout_lines)
    if not pieces_of_lint:
        printer.success('No lint found')
        return
    s = '' if pieces_of_lint == 1 else 's'
    # format_map(locals()) picks up `pieces_of_lint` and `s`.
    printer.error('{pieces_of_lint} piece{s} of lint found'.format_map(
        locals()))
def dbshell(db):
    """Open psql inside the bycycle postgres Docker container.

    Args:
        db: Mapping with 'user', 'host', 'port', 'database' keys and an
            optional 'password' key.

    """
    password = db.get('password')
    local((
        'docker', 'exec', '--interactive', '--tty',
        # NOTE(review): the PGPASSWORD value in the original was redacted
        # ("******"), which would pass that literal as the password.
        # Reconstructed to pass the actual password through the container
        # environment -- confirm against history.
        ('--env', f'PGPASSWORD={password}') if password else None,
        'bycycledocker_postgres_1',
        'psql',
        '--username', db['user'],
        '--host', db['host'],
        '--port', db['port'],
        '--dbname', db['database'],
    ))
def format_code(check=False, where="./"):
    """Run black on ``where``.

    With ``check``, only report whether files would be reformatted (and
    don't raise when they would); otherwise reformat in place.
    Returns the result of the black invocation.
    """
    if check:
        printer.header("Checking code formatting...")
        mode_arg, raise_on_error = "--check", False
    else:
        printer.header("Formatting code...")
        mode_arg, raise_on_error = None, True
    return local(("black", mode_arg, where), raise_on_error=raise_on_error)
def install(upgrade=False):
    """Install dependencies with poetry; with ``upgrade``, eagerly upgrade
    pip/setuptools and update locked dependencies instead.
    """
    if not upgrade:
        local('poetry install')
        return
    local(
        '.venv/bin/pip install --upgrade --upgrade-strategy eager pip setuptools'
    )
    local('poetry update')
def rollup(env, live_reload: arg(type=bool) = None, watch=False, quiet=False):
    """Run rollup with NODE_ENV set to ``env``; abort with rollup's stderr
    on failure.

    ``live_reload`` (when given) is exported as LIVE_RELOAD='0'/'1'.
    """
    environ = {'NODE_ENV': env}
    if live_reload is not None:
        environ['LIVE_RELOAD'] = str(int(live_reload))
    result = local(
        ('rollup', '--config', '--watch' if watch else None),
        environ=environ,
        stdout='hide' if quiet else None,
        stderr='capture',
        raise_on_error=False,
    )
    if result.failed:
        abort(result.return_code, result.stderr)
def prepare_release(info):
    """Bump version strings and the change log on the dev branch, then
    commit the changes and return to the original branch.
    """
    version = info.version
    print_step_header("Preparing release", version, "on", info.date)
    current_branch = get_current_branch()
    local(("git", "checkout", info.dev_branch))
    if info.pyproject_file:
        quote = info.pyproject_version_quote
        update_line(
            info.pyproject_file,
            info.pyproject_version_line_number,
            f"version = {quote}{version}{quote}",
        )
    if info.version_file:
        quote = info.version_quote
        update_line(
            info.version_file,
            info.version_line_number,
            f"__version__ = {quote}{version}{quote}",
        )
    update_line(
        info.change_log,
        info.change_log_line_number,
        f"## {version} - {info.date}",
    )
    # NOTE(review): commit_files may contain falsy entries when
    # pyproject_file/version_file are unset; presumably git diff/commit
    # tolerate that via the command assembly -- confirm.
    commit_files = (info.pyproject_file, info.version_file, info.change_log)
    local(("git", "diff", *commit_files))
    if info.confirmation_required:
        confirm("Commit these changes?", abort_on_unconfirmed=True)
    else:
        printer.warning("Committing changes")
    msg = f"Prepare {info.name} release {version}"
    msg = prompt("Commit message", default=msg)
    local(("git", "commit", commit_files, "-m", msg))
    local(("git", "checkout", current_branch))
def merge_to_target_branch(info):
    """Merge the dev branch into the target branch for a release, then
    return to the original branch.
    """
    print_step_header(
        "Merging",
        info.dev_branch,
        "into",
        info.target_branch,
        "for release",
        info.version,
    )
    current_branch = get_current_branch()
    # Show the commits that would be merged.
    local((
        "git",
        "log",
        "--oneline",
        "--reverse",
        f"{info.target_branch}..{info.dev_branch}",
    ))
    if info.confirmation_required:
        msg = (f"Merge these changes from {info.dev_branch} "
               f"into {info.target_branch} "
               f"for release {info.version}?")
        confirm(msg, abort_on_unconfirmed=True)
    else:
        printer.warning(
            "Merging changes from",
            info.dev_branch,
            "into",
            info.target_branch,
            "for release",
            # BUG FIX: was `info.release`, an attribute not used by any
            # sibling function (they all use `info.version`); it would
            # likely fail on the unconfirmed path.
            info.version,
        )
    local(("git", "checkout", info.target_branch))
    msg = f"Merge branch '{info.dev_branch}' for {info.name} release {info.version}"
    msg = prompt("Commit message", default=msg)
    local(("git", "merge", "--no-ff", info.dev_branch, "-m", msg))
    local(("git", "checkout", current_branch))
def resume_development(info):
    """Bump version strings to the next dev version, add an 'unreleased'
    change log section, and commit.
    """
    next_version = info.next_version
    dev_version = f"{next_version}.dev0"
    print_step_header(f"Resuming development of {info.name} at {next_version} "
                      f"({dev_version})")
    current_branch = get_current_branch()
    if info.pyproject_file:
        quote = info.pyproject_version_quote
        update_line(
            info.pyproject_file,
            info.pyproject_version_line_number,
            f"version = {quote}{dev_version}{quote}",
        )
    if info.version_file:
        quote = info.version_quote
        update_line(
            info.version_file,
            info.version_line_number,
            f"__version__ = {quote}{dev_version}{quote}",
        )
    # Insert a fresh "unreleased" section at the change log's version line.
    new_change_log_lines = [
        f"## {next_version} - unreleased\n\n",
        "In progress...\n\n",
    ]
    with info.change_log.open() as fp:
        lines = fp.readlines()
    lines = (lines[:info.change_log_line_number] +
             new_change_log_lines +
             lines[info.change_log_line_number:])
    with info.change_log.open("w") as fp:
        fp.writelines(lines)
    commit_files = (info.pyproject_file, info.version_file, info.change_log)
    local(("git", "diff", *commit_files))
    if info.confirmation_required:
        confirm("Commit these changes?", abort_on_unconfirmed=True)
    else:
        printer.warning("Committing changes")
    msg = f"Resume development of {info.name} at {next_version}"
    msg = prompt("Commit message", default=msg)
    local(("git", "commit", commit_files, "-m", msg))
    local(("git", "checkout", current_branch))
def create():
    # NOTE(review): nested helper -- `config`, `python`, and `where` are
    # closure variables from an enclosing function not visible here;
    # format_map(locals()) still sees `where` because it is referenced
    # in this scope.
    local(config, ('virtualenv', '-p', python, where))
    printer.success(
        'Virtualenv created; activate it by running `source {where}/bin/activate`'
        .format_map(locals()))
def deploy(config, version=None, overwrite=False, overwrite_venv=False,
           install=True, push=True, link=True, reload=True):
    """Build a versioned release locally, push it to the remote host,
    install it into a per-version virtualenv, and activate it.
    """
    # Setup ----------------------------------------------------------

    if version:
        config = config.copy(version=version)
    elif config.get('version'):
        printer.info('Using default version:', config.version)
    else:
        abort(1, 'Version must be specified via config or passed as an option')

    # Local ----------------------------------------------------------

    build_dir = config.build.dir
    if overwrite and os.path.exists(build_dir):
        shutil.rmtree(build_dir)
    os.makedirs(build_dir, exist_ok=True)

    # Add config files
    copy_file(config, 'application.wsgi', build_dir, template=True)
    copy_file(config, 'base.ini', build_dir)
    copy_file(config, '{env}.ini', build_dir, template=True)
    copy_file(config, 'commands.py', build_dir)
    copy_file(config, 'commands.cfg', build_dir)

    # Create source distributions
    dist_dir = os.path.abspath(config.build.dist_dir)
    sdist_command = ('python setup.py sdist --dist-dir', dist_dir)
    local(config, sdist_command, hide='stdout')
    # Also build sdists for any extra local packages configured for deploy.
    for path in config.deploy.sdists:
        local(config, sdist_command, hide='stdout', cd=path)

    # Bundle the build directory into a versioned tarball.
    tarball_name = '{config.version}.tar.gz'.format(config=config)
    tarball_path = os.path.join(build_dir, tarball_name)
    with tarfile.open(tarball_path, 'w:gz') as tarball:
        tarball.add(build_dir, config.version)

    if push:
        local(config, (
            'rsync -rltvz',
            '--rsync-path "sudo -u {deploy.user} rsync"',
            tarball_path,
            '{remote.host}:{deploy.root}',
        ))

    # Remote ----------------------------------------------------------

    deploy_dir_exists = remote(config, 'test -d {deploy.dir}',
                               abort_on_failure=False)

    if deploy_dir_exists and overwrite:
        remote(config, 'rm -r {deploy.dir}')

    remote(config, ('tar -xvzf', tarball_name), cd='{deploy.root}')

    # Create virtualenv for this version
    venv_exists = remote(config, 'test -d {deploy.venv}',
                         abort_on_failure=False)

    if venv_exists and overwrite_venv:
        remote(config, 'rm -r {deploy.venv}')
        venv_exists = False

    if not venv_exists:
        remote(config, (
            'python{python.version} -m venv {deploy.venv} &&',
            '{deploy.pip.exe} install',
            '--cache-dir {deploy.pip.cache_dir}',
            '--upgrade setuptools pip wheel',
        ))

    # Build source
    if install:
        remote(config, (
            '{deploy.pip.exe}',
            'install',
            '--find-links {deploy.pip.find_links}',
            '--cache-dir {deploy.pip.cache_dir}',
            '--disable-pip-version-check',
            '{package}',
        ), cd='{deploy.root}', timeout=120)

    # Make this version the current version
    if link:
        remote(config, 'ln -sfn {deploy.dir} {deploy.link}')

    # Set permissions
    remote(config, 'chmod -R ug=rwX,o= {deploy.root}')

    if reload:
        reload_uwsgi(config)
def install(update=False):
    """Install dependencies with poetry, optionally updating the lock file
    first.
    """
    commands = ["poetry install"]
    if update:
        commands.insert(0, "poetry update")
    for command in commands:
        local(command)