def setup_heroku_git_remote(c, app_instance):
    """Configure a git remote pointing at the Heroku app and return its name."""
    check_if_logged_in_to_heroku(c)
    remote_name = 'heroku-{app}'.format(app=app_instance)
    remote_cmd = 'heroku git:remote --app {app} --remote {remote}'.format(
        app=app_instance, remote=remote_name)
    local(remote_cmd)
    return remote_name
def pull_database_from_heroku(c, app_instance):
    """Drop the local database and repopulate it from the Heroku app's DB."""
    check_if_logged_in_to_heroku(c)
    delete_local_database(c)
    pull_cmd = 'heroku pg:pull --app {app} DATABASE_URL {local_database}'.format(
        app=app_instance, local_database=LOCAL_DATABASE_NAME)
    local(pull_cmd)
def clone_database(ctx):
    """
    Clone the database specified in config.py. Use when working with
    migrations. The new database will be prefixed by the hash of the
    latest alembic revision.
    """
    # Safety net: dump the current DB before cloning anything.
    dbdump("alexandria_preclone.sql")
    # BUG FIX: the original used Python 2 `print` statements, which are
    # syntax errors under Python 3 (the rest of this file is Python 3).
    print("For accidents, the back-up `alexandria_preclone.sql` was created.")
    from sqlalchemy import Table, MetaData
    from sqlalchemy.sql import select
    engine = create_engine(def_cfg.SQLALCHEMY_DATABASE_URI)
    meta = MetaData(bind=engine)
    # Prefix the clone with the short hash of the latest commit.
    last_commit = local("git log --oneline | head -n1 | awk '{print $1}'",
                        capture=True)
    new_db_name = '_'.join((def_cfg.SQL_DB_NAME, last_commit))
    new_test_db_name = '_'.join((new_db_name, "test"))
    # Create both the working clone and its companion test database.
    __docker_compose_run(
        'mysql -h db -u root -e "CREATE DATABASE %s DEFAULT CHARACTER SET = utf8"'
        % new_test_db_name, "db")
    __docker_compose_run(
        'mysql -h db -u root -e "CREATE DATABASE %s DEFAULT CHARACTER SET = utf8"'
        % new_db_name, "db")
    # Pipe a dump of the source DB straight into the new clone.
    local(
        "%s | %s" % (__docker_compose_runstr(
            "mysqldump -h db -u root %s" % def_cfg.SQL_DB_NAME, "db_runner_1"),
            __docker_compose_runstr("mysql -h db -u root %s" % new_db_name,
                                    "db_runner_2")))
    print("NOTE: Must reconfigure this branch to use %s and %s instead" % (
        new_db_name, new_test_db_name))
    print("Don't forget to reconfigure alembic.ini as well!")
def test(ctx):
    """Run the Django test suite under coverage, then emit text and HTML reports."""
    commands = (
        "coverage run --source=. manage.py test PManager.ViewsTest --traceback ",
        "coverage report --skip-covered",
        "coverage html --skip-covered",
    )
    for command in commands:
        local(command, echo=True)
def pull_database_from_dokku(c, dokku_remote, app_db_instance):
    """Stream a Dokku postgres export over SSH into the local database."""
    clean_local_database(c)
    restore_cmd = ('ssh {remote} postgres:export {db_instance} | '
                   'pg_restore -d {local_database}').format(
        remote=dokku_remote,
        db_instance=app_db_instance,
        local_database=LOCAL_DATABASE_NAME,
    )
    local(restore_cmd)
def test(filepath=''):
    """Run the test suite via setup.py, optionally restricted to one file.

    :param filepath: optional test file path forwarded via ``-a``; the task
        aborts if the file does not exist.
    """
    if filepath:
        if not op.exists(filepath):
            print('Error: could not find file {}'.format(filepath))
            # BUG FIX: `exit()` comes from the `site` module and may be
            # absent (e.g. `python -S`); raise SystemExit directly.
            raise SystemExit(-1)
        cmd = 'python setup.py test -a ' + filepath
    else:
        cmd = 'python setup.py test'
    local(cmd)
def install_deps(req_filepaths=None):
    """pip-install every requirement listed in the given requirements files.

    :param req_filepaths: list of requirements file paths; defaults to
        ``['requirements.txt']``. Installation stops at the first failure.
    """
    # BUG FIX: the default was a mutable list literal (shared across calls).
    if req_filepaths is None:
        req_filepaths = ['requirements.txt']
    deps = get_requirements(*req_filepaths)
    dep_name = None
    try:
        for dep_name in deps:
            cmd = "pip install '{0}'".format(dep_name)
            print('#', cmd)
            local(cmd)
    # BUG FIX: narrowed from a bare `except:` so Ctrl-C still aborts.
    except Exception:
        print('Error installing {}'.format(dep_name))
def install_deps(req_filepaths=None):
    """pip-install every requirement listed in the given requirements files.

    :param req_filepaths: list of requirements file paths; defaults to
        ``['requirements.txt']``. Installation stops at the first failure.
    """
    # BUG FIX: the default was a mutable list literal (shared across calls).
    if req_filepaths is None:
        req_filepaths = ['requirements.txt']
    deps = get_requirements(*req_filepaths)
    dep_name = None
    try:
        for dep_name in deps:
            cmd = "pip install '{0}'".format(dep_name)
            print('#', cmd)
            local(cmd)
    # BUG FIX: narrowed from a bare `except:` so Ctrl-C still aborts.
    except Exception:
        print('Error installing {}'.format(dep_name))
def deploy(ctx):
    """Collect static files, purge pycache, rsync the app and restart it."""
    app_name = "newapp"
    directory_app = "apps/newapp"
    local("python manage.py collectstatic --noinput", echo=True)
    local("find . -name '__pycache__' |xargs rm -rf ", echo=True)
    rsync(ctx, ".", directory_app, exclude=exclude_dirs)
    remote_cmds = (
        "pip install -r requirements.txt",
        "python manage.py migrate",
        f"sudo supervisorctl restart {app_name}",
    )
    with ctx.cd(directory_app):
        with ctx.prefix(f"source ~/{directory_app}/.env/bin/activate"):
            for remote_cmd in remote_cmds:
                ctx.run(remote_cmd)
def pull_database_from_heroku(c, app_instance):
    """Pull the Heroku DB into the local one, then offer to recreate a superuser."""
    check_if_logged_in_to_heroku(c)
    delete_local_database(c)
    local("heroku pg:pull --app {app} DATABASE_URL {local_database}".format(
        app=app_instance, local_database=LOCAL_DATABASE_NAME))
    prompt = ("Any superuser accounts you previously created locally will"
              " have been wiped. Do you wish to create a new superuser? (Y/n): ")
    answer = input(prompt).strip().lower()
    # Empty answer defaults to "yes".
    if answer in ("", "y"):
        local("django-admin createsuperuser", pty=True)
def clean_dokku_database(c, dokku_remote, db_instance):
    """Back up the Dokku database locally, then drop and recreate its schema."""
    backup_cmd = ('ssh -t {remote} postgres:export {db_instance} > '
                  '{db_instance}-backup.pg').format(
        remote=dokku_remote,
        db_instance=db_instance,
    )
    local(backup_cmd)
    reset_cmd = ('echo "DROP SCHEMA public CASCADE; CREATE SCHEMA public;" | '
                 'ssh -t {remote} postgres:connect {db_instance}').format(
        remote=dokku_remote,
        db_instance=db_instance,
    )
    local(reset_cmd)
def start(c):
    """ Start the development environment """
    frontend_is_local = FRONTEND == "local"
    if frontend_is_local:
        local("docker-compose up -d web")
    else:
        local("docker-compose up -d web frontend")
    print("Use `fab sh` to enter the web container and run `djrun`")
    if not frontend_is_local:
        print("Use `fab npm start` to run the front-end tooling")
def deploy(ctx):
    """Build front-end assets, collect static files, sync, and restart the app."""
    for build_cmd in ("yarn build", "python manage.py collectstatic --noinput"):
        local(build_cmd, echo=True)
    rsync(ctx, "static/", "apps/django-sockpuppet-expo/static/",
          exclude=exclude_dirs)
    with ctx.cd("apps/django-sockpuppet-expo"):
        ctx.run('git pull')
        with ctx.prefix("source .env/bin/activate"):
            ctx.run('pip3.8 install -r requirements.in')
            ctx.run('python3.8 manage.py migrate')
            ctx.run("sudo supervisorctl restart expo:*")
def _db_pull(tables=None):
    """Dump the remote MySQL database, download it, and import it locally.

    :param tables: optional iterable of table names to restrict the dump;
        ``None`` dumps the whole database.
    """
    pw = _get_db_pw()
    # BUG FIX: multiple table names must be space-separated for mysqldump
    # (the _db_push counterpart already uses " ".join); "".join ran them
    # together into one bogus table name.
    tables = " ".join(tables) if tables is not None else ""
    t = datetime.now()
    # Timestamped filename so successive pulls never collide.
    fn = 'eamon-{0.year}-{0.month:0>2}-{0.day:0>2}-{0.hour:0>2}-{0.minute:0>2}-{0.second:0>2}.sql'.format(
        t)
    print('exporting DB on server...')
    c.run('mysqldump eamon -u eamon -p{} {} | gzip > {}.gz'.format(
        pw, tables, fn))
    print('downloading DB...')
    c.get("{}.gz".format(fn))
    print('importing DB locally...')
    if platform.system() == 'Windows':
        local('7z.exe e ./{}.gz -y'.format(fn), hide='out')
    else:
        local('gunzip {}'.format(fn))
    # We can sniff out the local PW on *nix but Windows should just use an empty PW.
    if platform.system() == 'Windows':
        local_pw = ""
    else:
        local_pw = _get_local_db_pw()
    if local_pw != "":
        local('mysql -u root -p{} eamon -e "source {}"'.format(local_pw, fn))
    else:
        local('mysql -u root eamon -e "source {}"'.format(fn))
def delete_local_renditions(local_database_name=LOCAL_DATABASE_NAME):
    """Best-effort deletion of image renditions from the local database.

    Tries both the legacy ``images_rendition`` and the current
    ``wagtailimages_rendition`` table names; failures (e.g. a missing
    table) are deliberately ignored.

    :param local_database_name: database to clean; defaults to
        LOCAL_DATABASE_NAME.
    """
    try:
        local(
            'sudo -u postgres psql -d {database_name} -c "DELETE FROM images_rendition;"'
            .format(database_name=local_database_name))
    # BUG FIX: narrowed from a bare `except:` so KeyboardInterrupt and
    # SystemExit still propagate instead of being swallowed.
    except Exception:
        pass
    try:
        local(
            'sudo -u postgres psql -d {database_name} -c "DELETE FROM wagtailimages_rendition;"'
            .format(database_name=local_database_name))
    except Exception:
        pass
def deploy_to_heroku(c, app_instance, local_branch='master', remote_branch='master'):
    """Push a local branch to the Heroku app's git remote after confirmation.

    :param local_branch: branch to push from (default 'master')
    :param remote_branch: branch to push to (default 'master')
    """
    # CONSISTENCY FIX: the sibling deploy task verifies the Heroku login
    # first; without it the push fails late with a less helpful error.
    check_if_logged_in_to_heroku(c)
    print('This will push your local "{local_branch}" branch to remote '
          '"{remote_branch}" branch.'.format(local_branch=local_branch,
                                             remote_branch=remote_branch))
    deploy_prompt(c, app_instance)
    remote_name = setup_heroku_git_remote(c, app_instance)
    local('git push {remote} {local_branch}:{remote_branch}'.format(
        remote=remote_name,
        local_branch=local_branch,
        remote_branch=remote_branch,
    ))
def _db_push(prefix="eamon", tables=None):
    """Export the local MySQL database, upload it, and import it on the server.

    A server-side backup is taken before the import.
    """
    pw = _get_db_pw()
    tables = " ".join(tables) if tables is not None else ""
    t = datetime.now()
    fn = '{0}-{1.year}-{1.month:0>2}-{1.day:0>2}-{1.hour:0>2}-{1.minute:0>2}-{1.second:0>2}.sql'.format(
        prefix, t)
    print('exporting DB locally...')
    local_pw = _get_local_db_pw()
    if local_pw != "":
        local('mysqldump eamon -u root -p{} -r {} {}'.format(
            local_pw, fn, tables))
    else:
        local('mysqldump eamon -u root -r {} {}'.format(fn, tables))
    on_windows = platform.system() == 'Windows'
    if on_windows:
        local('7z.exe a {0}.gz {0} -y'.format(fn), hide='out')
    else:
        local('gzip {}'.format(fn))
    print('uploading DB...')
    c.put("{}.gz".format(fn))
    # Snapshot the server DB before overwriting it.
    _db_backup()
    print('importing DB on server...')
    c.run('gunzip {}.gz'.format(fn))
    c.run('mysql eamon -u eamon -p{} < {}'.format(pw, fn))
    print('cleaning up...')
    c.run('rm {}'.format(fn))
def _db_pull(tables=None):
    """Dump the remote MySQL database, download it, and import it locally.

    Windows-only variant: uses 7-Zip for decompression.

    :param tables: optional iterable of table names to restrict the dump;
        ``None`` dumps the whole database.
    """
    pw = _get_db_pw()
    # BUG FIX: multiple table names must be space-separated for mysqldump
    # (the _db_push counterpart uses " ".join); "".join ran them together.
    tables = " ".join(tables) if tables is not None else ""
    t = datetime.now()
    fn = 'eamon-{0.year}-{0.month:0>2}-{0.day:0>2}-{0.hour:0>2}-{0.minute:0>2}-{0.second:0>2}.sql'.format(
        t)
    print('exporting DB on server...')
    c.run('mysqldump eamon -u eamon -p{} {} | gzip > {}.gz'.format(
        pw, tables, fn))
    print('downloading DB...')
    c.get("{}.gz".format(fn))
    print('importing DB locally...')
    # windows only
    local('7z.exe e ./{}.gz -y'.format(fn), hide='out')
    local('mysql -u root eamon -e "source {}"'.format(fn))
def docs(doc_type='html'):
    """Regenerate the Sphinx API docs, build them, and open the result.

    :param doc_type: Sphinx make target to build (default 'html').
    """
    # ROBUSTNESS FIX: on a fresh checkout the generated .rst files do not
    # exist yet and os.remove would raise FileNotFoundError.
    for generated in (op.join('docs', module_name + '.rst'),
                      op.join('docs', 'modules.rst')):
        if op.exists(generated):
            os.remove(generated)
    local('sphinx-apidoc -o docs/ ' + module_name)
    os.chdir('docs')
    try:
        local('make clean')
        local('make ' + doc_type)
    finally:
        # Always restore the working directory, even if the build fails.
        os.chdir(CWD)
    local('open docs/_build/html/index.html')
def deploy_to_heroku(c, app_instance, local_branch='master', remote_branch='master'):
    """Push ``local_branch`` to the Heroku app's ``remote_branch`` after confirmation."""
    check_if_logged_in_to_heroku(c)
    notice = ('This will push your local "{local_branch}" branch to remote '
              '"{remote_branch}" branch.').format(
        local_branch=local_branch, remote_branch=remote_branch)
    print(notice)
    deploy_prompt(c, app_instance)
    remote_name = setup_heroku_git_remote(c, app_instance)
    push_cmd = 'git push {remote} {local_branch}:{remote_branch}'.format(
        remote=remote_name,
        local_branch=local_branch,
        remote_branch=remote_branch,
    )
    local(push_cmd)
def aws(c, command, aws_access_key_id, aws_secret_access_key, **kwargs):
    """Run an AWS CLI command with the given credentials exported inline.

    Extra keyword arguments are forwarded to ``local``.
    """
    # NOTE(review): the credentials end up on the shell command line —
    # confirm this is acceptable for the environments this task runs in.
    full_cmd = (
        "AWS_ACCESS_KEY_ID={access_key_id} AWS_SECRET_ACCESS_KEY={secret_key} "
        "aws {command}"
    ).format(
        access_key_id=aws_access_key_id,
        secret_key=aws_secret_access_key,
        command=command,
    )
    return local(full_cmd, **kwargs)
def deploy_to_dokku(c, dokku_remote, app_instance, local_branch='master', remote_branch=None):
    """Push a local branch to a Dokku app; remote branch defaults to the local one."""
    remote_branch = local_branch if remote_branch is None else remote_branch
    print('This will push your local "{local_branch}" branch to remote '
          '"{remote_branch}" branch.'.format(local_branch=local_branch,
                                             remote_branch=remote_branch))
    deploy_prompt(c, app_instance)
    push_cmd = 'git push {remote}:{app} {local_branch}:{remote_branch}'.format(
        remote=dokku_remote,
        app=app_instance,
        local_branch=local_branch,
        remote_branch=remote_branch,
    )
    local(push_cmd)
def fetchdb(c):
    """Pull the live database to the local copy"""
    # Dump on the server, download it, then clean up the remote file.
    c.run('pg_dump -Z1 -cf /tmp/demozoo-fetchdb.sql.gz demozoo')
    c.get('/tmp/demozoo-fetchdb.sql.gz', '/tmp/demozoo-fetchdb.sql.gz')
    c.run('rm /tmp/demozoo-fetchdb.sql.gz')
    # Rebuild the local database from the downloaded dump.
    rebuild_cmds = (
        'dropdb -U%s demozoo' % db_username,
        'createdb -U%s demozoo' % db_username,
        'gzcat /tmp/demozoo-fetchdb.sql.gz | psql -U%s demozoo' % db_username,
        'rm /tmp/demozoo-fetchdb.sql.gz',
    )
    for cmd in rebuild_cmds:
        local(cmd)
def aws(c, command, aws_access_key_id, aws_secret_access_key, **kwargs):
    """Invoke the AWS CLI, passing credentials through environment variables.

    Extra keyword arguments are forwarded to ``local``.
    """
    template = (
        'AWS_ACCESS_KEY_ID={access_key_id} AWS_SECRET_ACCESS_KEY={secret_key} '
        'aws {command}'
    )
    return local(
        template.format(
            access_key_id=aws_access_key_id,
            secret_key=aws_secret_access_key,
            command=command,
        ),
        **kwargs,
    )
def compress(path: str):
    """
    Create a compress file of the project
    :param path: path of the project
    :return: the name of the zip file
    """
    my_date = datetime.now().strftime(time)
    split_path = path.split("/")
    version_path = path + "/versions"
    # Last path component is the project name used in the archive filename.
    name = split_path[len(split_path) - 1]
    zip_file = my_date + "-" + name + ".tgz"
    try:
        local(f'mkdir -p {version_path}')
        local(
            f'tar -zcf {version_path + "/" + zip_file} --absolute-names {path} > /dev/null'
        )
    # BUG FIX: was a bare `except:` (swallowing KeyboardInterrupt) whose
    # `finally` printed "File created" even when tar failed; now the
    # success message is printed only on the success path.
    except Exception:
        pass
    else:
        typer.echo(
            stylize(f"File created: {zip_file}", fg("green"), attr("bold")))
    return zip_file
def check_if_heroku_app_access_granted(c, app_instance):
    """Raise Exit unless the logged-in Heroku user can access the app."""
    check_if_logged_in_to_heroku(c)
    # Any command targeting an app would do. This one prints the list of who has access.
    result = local(f"heroku access --app {app_instance}", hide="both", warn=True)
    if result.stderr:
        raise Exit(
            "You do not have access to this app. Please either try to add "
            "yourself with:\n"
            f"heroku apps:join --app {app_instance}\n\n"
            "Or ask a team admin to add you with:\n"
            f"heroku access:add <your email address> --app {app_instance}")
def make_gobgp_ctn(ctx, tag='gobgp', local_gobgp_path='', from_image='osrg/quagga'):
    """Build a gobgp docker image from locally compiled binaries.

    :param tag: docker tag for the built image
    :param local_gobgp_path: path to the gobgp checkout; defaults to CWD
    :param from_image: base image for the generated Dockerfile
    :raises Exception: if the path does not contain 'gobgp'
    """
    if local_gobgp_path == '':
        local_gobgp_path = os.getcwd()
    # Build static binaries so they run inside the container image.
    local('CGO_ENABLED=0 go build "-ldflags=-s -w -buildid=" ./cmd/gobgp')
    local('CGO_ENABLED=0 go build "-ldflags=-s -w -buildid=" ./cmd/gobgpd')
    c = CmdBuffer()
    c << 'FROM {0}'.format(from_image)
    c << 'COPY gobgp/gobgpd /go/bin/gobgpd'
    c << 'COPY gobgp/gobgp /go/bin/gobgp'
    # BUG FIX: str.rindex raises ValueError instead of returning -1, so the
    # `< 0` guard below was unreachable; rfind returns -1 as intended.
    rindex = local_gobgp_path.rfind('gobgp')
    if rindex < 0:
        raise Exception('{0} seems not gobgp dir'.format(local_gobgp_path))
    workdir = local_gobgp_path[:rindex]
    os.chdir(workdir)
    local('echo \'{0}\' > Dockerfile'.format(str(c)))
    local('docker build -t {0} .'.format(tag))
    local('rm Dockerfile')
def pull_database_backup_from_heroku(c, app_instance):
    """Download the latest Heroku backup and restore it into the local database."""
    check_if_logged_in_to_heroku(c)
    local("heroku pg:backups:download --app {app}".format(app=app_instance))
    # Need to check whether previous command has succeeded
    # If an error similar to following is raised, the installed version of Postgres is
    # older than the Heroku version.
    # pg_restore: [archiver] unsupported version (1.14) in file header
    restore_cmd = (
        "pg_restore --clean --no-privileges --no-owner -d {local_database} latest.dump"
        .format(local_database=LOCAL_DATABASE_NAME))
    local(restore_cmd)
    local("rm latest.dump")
    print("Database backup restored")
def push_database_to_dokku(c, dokku_remote, app_instance, db_instance):
    """Overwrite the Dokku database with the local one after typed confirmation."""
    prompt_msg = 'You are about to push your local database to Dokku. ' \
                 'It\'s a destructive operation and will override the ' \
                 'database on the server. \n' \
                 'Please type the database name "{db_instance}" to ' \
                 'proceed:\n>>> '.format(db_instance=make_bold(db_instance))
    if input(prompt_msg) != db_instance:
        raise Exit("Aborted")
    # Stop the app while the database is replaced.
    local('ssh {remote} ps:stop {app}'.format(remote=dokku_remote,
                                              app=app_instance))
    clean_dokku_database(c, dokku_remote, db_instance)
    import_cmd = ('pg_dump -Fc --no-acl --no-owner -w {local_db} | '
                  'ssh -t {remote} postgres:import {db_instance} '
                  '|| true').format(
        local_db=LOCAL_DATABASE_NAME,
        remote=dokku_remote,
        db_instance=db_instance,
    )
    local(import_cmd)
    local('ssh {remote} ps:start {app}'.format(remote=dokku_remote,
                                               app=app_instance))
def make_gobgp_ctn(ctx, tag='gobgp', local_gobgp_path='', from_image='osrg/quagga'):
    """Build a gobgp docker image that compiles gobgp inside the container.

    :param tag: docker tag for the built image
    :param local_gobgp_path: path to the gobgp checkout; defaults to CWD
    :param from_image: base image for the generated Dockerfile
    :raises Exception: if the path does not contain 'gobgp'
    """
    if local_gobgp_path == '':
        local_gobgp_path = os.getcwd()
    c = CmdBuffer()
    c << 'FROM {0}'.format(from_image)
    c << 'ENV GO111MODULE on'
    c << 'ADD gobgp /tmp/gobgp'
    c << 'RUN cd /tmp/gobgp && go install ./cmd/gobgpd ./cmd/gobgp'
    # BUG FIX: str.rindex raises ValueError instead of returning -1, so the
    # `< 0` guard below was unreachable; rfind returns -1 as intended.
    rindex = local_gobgp_path.rfind('gobgp')
    if rindex < 0:
        raise Exception('{0} seems not gobgp dir'.format(local_gobgp_path))
    workdir = local_gobgp_path[:rindex]
    os.chdir(workdir)
    local('echo \'{0}\' > Dockerfile'.format(str(c)))
    local('docker build -t {0} .'.format(tag))
    local('rm Dockerfile')
def publish(c):
    """Publish to production"""
    update_repo()
    clean(c)
    with alter_template() as _:
        local('pelican -s publishconf.py')
    deploy_dir = pathlib.Path(CONFIG['deploy_path'])
    search_path = deploy_dir / 'tipuesearch_content.js'
    search_path_fix = deploy_dir / 'tipuesearch_content.json'
    if os.path.isfile(search_path):
        shutil.move(search_path, search_path_fix)
    # Detect if in local machine or in travis-ci
    if os.environ.get('TRAVIS', 'false') != 'true':
        with cd(f"{CONFIG['deploy_path']}"):
            git_cmds = (
                'git checkout master',
                'git add --all',
                f'''git commit -m "{CONFIG['commit_message']}"''',
                'git push -u github master --quiet',
            )
            for git_cmd in git_cmds:
                local(git_cmd)
        local('python ./utils/gitalk.py')
def update_repo():
    """Pull the latest deploy repository unless running under Travis CI."""
    on_travis = os.environ.get('TRAVIS', 'false') == 'true'
    if not on_travis:
        with cd(f"{CONFIG['deploy_path']}"):
            local('git pull')
def build(c):
    """Build local version of site"""
    build_cmd = 'pelican --settings pelicanconf.py'
    local(build_cmd)
def rebuild(c):
    """`build` with the delete switch"""
    rebuild_cmd = 'pelican --delete-output-directory --settings pelicanconf.py'
    local(rebuild_cmd)
def sdist():
    """Build a source distribution, upload a wheel, and list the dist contents."""
    clean()
    # NOTE(review): `setup.py ... upload` is deprecated in favour of twine
    # (see the release task) — confirm whether this path is still wanted.
    for build_cmd in ('python setup.py sdist',
                      'python setup.py bdist_wheel upload'):
        local(build_cmd)
    print(os.listdir('dist'))
def install():
    """Clean build artefacts, install dependencies, then install the package."""
    clean()
    install_deps()
    local('python setup.py install')
def push_database_to_heroku(c, app_instance):
    """Overwrite the Heroku database with the local one after typed confirmation."""
    check_if_logged_in_to_heroku(c)
    prompt_msg = 'You are about to push your local database to Heroku. ' \
                 'It\'s a destructive operation and will override the ' \
                 'database on the server. \n' \
                 'Please type the application name "{app_instance}" to ' \
                 'proceed:\n>>> '.format(app_instance=make_bold(app_instance))
    if input(prompt_msg) != app_instance:
        raise Exit("Aborted")
    # Stop the app and snapshot the server DB before the destructive reset.
    for step in ('heroku maintenance:on --app {app}',
                 'heroku ps:stop --app {app} web',
                 'heroku pg:backups:capture --app {app}',
                 'heroku pg:reset --app {app} --confirm {app}'):
        local(step.format(app=app_instance))
    local('heroku pg:push --app {app} {local_db} DATABASE_URL'.format(
        app=app_instance,
        local_db=LOCAL_DATABASE_NAME
    ))
    local('heroku ps:restart --app {app}'.format(app=app_instance))
    local('heroku maintenance:off --app {app}'.format(app=app_instance))
def regenerate(c):
    """Rebuild the site continuously as files change (pelican -r)."""
    regen_cmd = 'pelican -r -s pelicanconf.py'
    local(regen_cmd)
def build(c):
    """Build the site once with the development settings."""
    build_cmd = 'pelican -s pelicanconf.py'
    local(build_cmd)
def clean(c):
    """Remove any previous deploy output and create a fresh, empty directory."""
    if os.path.isdir(deploy_path):
        local('rm -rf {}'.format(deploy_path))
    # Recreate unconditionally so downstream tasks can rely on the
    # directory existing even on a fresh checkout.
    local('mkdir {}'.format(deploy_path))
def regenerate(c):
    """Automatically regenerate site upon file modification"""
    autoreload_cmd = 'pelican --autoreload --settings pelicanconf.py'
    local(autoreload_cmd)
def test_all():
    """Run the full test matrix via tox."""
    local('tox')
def create_local_database(c, local_database_name=LOCAL_DATABASE_NAME):
    """Create the local Postgres database.

    :param local_database_name: name of the database to create; defaults
        to LOCAL_DATABASE_NAME.
    """
    # BUG FIX: the parameter was accepted but ignored — the body always
    # used the LOCAL_DATABASE_NAME constant, so a caller-supplied name
    # had no effect.
    local('createdb {database_name}'.format(
        database_name=local_database_name
    ))
def lint():
    """Run flake8 over the package and its tests."""
    lint_cmd = 'flake8 ' + module_name + ' test'
    local(lint_cmd)
def open_heroku_shell(c, app_instance, shell_command='bash'):
    """Open an interactive shell (default bash) on a one-off Heroku dyno."""
    check_if_logged_in_to_heroku(c)
    run_cmd = 'heroku run --app {app} {command}'.format(
        app=app_instance,
        command=shell_command,
    )
    # pty=True so the remote shell is fully interactive.
    local(run_cmd, pty=True)
def check_if_logged_in_to_heroku(c):
    """Abort with a helpful message unless a Heroku session is active."""
    logged_in = local("heroku auth:whoami", warn=True)
    if not logged_in:
        raise Exit(
            'Log-in with the "heroku login -i" command before running this '
            "command.")
def serve(c):
    """Serve the generated site over HTTP from the deploy directory."""
    # NOTE(review): SimpleHTTPServer exists only on Python 2; under
    # Python 3 this would need `python -m http.server` — confirm which
    # interpreter `python` resolves to in this environment.
    serve_cmd = 'cd {} && python -m SimpleHTTPServer'.format(deploy_path)
    local(serve_cmd)
def get_heroku_variable(c, app_instance, variable):
    """Return a Heroku config variable's value, stripped of whitespace."""
    check_if_logged_in_to_heroku(c)
    result = local('heroku config:get {var} --app {app}'.format(
        app=app_instance,
        var=variable,
    ))
    return result.stdout.strip()
def preview(c):
    """Build the site using the production settings."""
    preview_cmd = 'pelican -s publishconf.py'
    local(preview_cmd)
def publish(c):
    """Build with production settings and rsync the output to the server."""
    local('pelican -s publishconf.py')
    rsync(c, deploy_path + '/', dest_path,
          exclude=".DS_Store", delete=True)
def coverage():
    """Run the test suite under coverage and open the HTML report.

    BUG FIX: `coverage local` is not a coverage.py subcommand, so this
    task could never collect data; `coverage run` is the intended call.
    """
    local('coverage run --source ' + module_name + ' setup.py test')
    local('coverage report -m')
    local('coverage html')
    local('open htmlcov/index.html')
def develop():
    """Clean, install dependencies, then install the package in development mode."""
    clean()
    install_deps()
    local('python setup.py develop')
def get_heroku_variable(c, app_instance, variable):
    """Fetch a Heroku config var and return its stripped stdout."""
    check_if_logged_in_to_heroku(c)
    config_cmd = "heroku config:get {var} --app {app}".format(
        app=app_instance, var=variable)
    return local(config_cmd).stdout.strip()
def release():
    """Build fresh sdist/wheel artefacts and upload them with twine."""
    clean()
    release_cmds = (
        'pip install -U pip setuptools twine wheel',
        'python setup.py sdist bdist_wheel',
        'twine upload dist/*',
    )
    for release_cmd in release_cmds:
        local(release_cmd)
def prepare(c):
    """Run the sails deploy script via npx."""
    local("npx sails run deploy")
def check_if_logged_in_to_heroku(c):
    """Raise Exit unless `heroku auth:whoami` reports an active session."""
    if local('heroku auth:whoami', warn=True):
        return
    raise Exit(
        'Log-in with the "heroku login -i" command before running this '
        'command.'
    )