def _supervisor_command(command):
    """Run a supervisorctl subcommand on the remote host via sudo.

    command -- the supervisorctl subcommand string, e.g. 'restart all'.
    """
    require('hosts', provided_by=('staging', 'preview', 'production'))
    # Removed long-dead commented-out OS-specific path selection; the
    # bare 'supervisorctl' relies on the remote PATH.
    # shell=False avoids wrapping the command in an extra login shell.
    sudo('supervisorctl %s' % (command), shell=False)
def update_code():
    """Updates the source code and its requirements"""
    require('environment', provided_by=[production, staging])
    # Keep remote chatter out of the console while pulling the repo and
    # refreshing its requirements.
    with settings(hide('stdout', 'stderr')):
        _update_repo()
        _update_requirements()
def dump_db(dumpfile="pootle_DB_backup.sql"):
    """Dumps the DB as a SQL script and downloads it

    dumpfile -- local filename for the downloaded dump; the same name is
    used for the dump inside the remote project path.
    """
    require('environment', provided_by=[production, staging])
    # "not X or confirm(...)" is logically equivalent to the previous
    # "(X and confirm(...)) or not X" but checks each path only once.
    if not isfile(dumpfile) or confirm('\n%s already exists locally. Do you '
                                       'want to overwrite it?' % dumpfile,
                                       default=False):
        remote_filename = '%s/%s' % (env['project_path'], dumpfile)
        if not exists(remote_filename) or confirm(
                '\n%s already exists. Do you '
                'want to overwrite it?' % remote_filename, default=False):
            print('\nDumping DB...')
            with settings(hide('stderr')):
                # -p makes mysqldump prompt for the password interactively.
                sudo('mysqldump -u %s -p %s > %s' % (env['db_user'],
                                                     env['db_name'],
                                                     remote_filename))
                get(remote_filename, '.')
        else:
            print('\nAborting.')
    else:
        print('\nAborting.')
def install_dependencies_pip():
    """ Install all dependencies available from pip """
    require('environment', provided_by=[dev, prod])
    create_virtualenv()

    # if this is a development install then filter out anything we have a
    # git repo for.
    pips_ = PIP_INSTALL.copy()
    if env.environment == 'development':
        # Plain loop instead of map(): map-for-side-effects is a py2-ism
        # that also builds a throwaway list.
        for k in GIT_INSTALL:
            if k in PIP_INSTALL:
                pips_.pop(k)

    if not pips_:
        # BUG FIX: the old message said 'No git repos to install' although
        # this branch means there are no pip packages left to install.
        print('No pip packages to install')
        return

    with lcd(env.doc_root):
        # XXX create temp requirements file text from list of requirements
        # it will be destroyed after install is complete
        requirements = '\n'.join([''.join(p) for p in pips_.items()])
        with settings(hide('running')):
            local("echo '%s' > requirements.txt" % requirements)
            local('pip install -E %(virtualenv)s -r requirements.txt' % env)
            local('rm requirements.txt')
def selenium_test():
    """Trigger the Jenkins selenium job for the current environment."""
    require('environment', provided_by=('staging', 'preview', 'demo',
                                        'production', 'india'))
    # NOTE(review): the original source was corrupted here ("******"
    # spliced between the prompt text and the env key, which is not valid
    # Python); restored to fabric's prompt(text, key, ...) form --
    # confirm against project history.
    prompt("Jenkins username:", "jenkins_user", default="selenium")
    prompt("Jenkins password:", "jenkins_password")
    url = env.selenium_url % {"token": "foobar",
                              "environment": env.environment}
    local("curl --user %(user)s:%(pass)s '%(url)s'" %
          {'user': env.jenkins_user,
           'pass': env.jenkins_password,
           'url': url})
def test_require_key_exists_empty_list(): """ When given a single existing key but the value is an empty list, require() aborts """ # 'hosts' is one of the default values, so we know it'll be there require('hosts')
def install_site():
    """Configures the server and enables the site"""
    require('environment', provided_by=[production, staging])
    # Push the server configuration first, then switch the site on.
    with settings(hide('stdout', 'stderr')):
        update_config()
        enable_site()
def hotfix_deploy():
    """
    deploy ONLY the code with no extra cleanup or syncing

    for small python-only hotfixes
    """
    # Three explicit confirmations guard this dangerous shortcut deploy.
    if not console.confirm('Are you sure you want to deploy {env.environment}?'.format(env=env), default=False) or \
       not console.confirm('Did you run "fab {env.environment} preindex_views"? '.format(env=env), default=False) or \
       not console.confirm('HEY!!!! YOU ARE ONLY DEPLOYING CODE. THIS IS NOT A NORMAL DEPLOY. COOL???', default=False):
        utils.abort('Deployment aborted.')
    require('root', provided_by=('staging', 'preview', 'production', 'india'))
    run('echo ping!')  # workaround for delayed console response
    try:
        # Code update only -- no migrations, static files, or index flips.
        execute(update_code)
    except Exception:
        execute(mail_admins, "Deploy failed", "You had better check the logs.")
        # hopefully bring the server back to life
        execute(services_restart)
        raise
    else:
        # On success: restart services and record the deploy.
        execute(services_restart)
        execute(record_successful_deploy)
def deploy():
    """deploy code to remote host by checking out the latest via git"""
    # Two confirmations before touching the remote environment.
    if not console.confirm('Are you sure you want to deploy {env.environment}?'.format(env=env), default=False) or \
       not console.confirm('Did you run "fab {env.environment} preindex_views"? '.format(env=env), default=False):
        utils.abort('Deployment aborted.')
    require('root', provided_by=('staging', 'preview', 'production', 'india'))
    run('echo ping!')  # workaround for delayed console response
    try:
        # Full deploy pipeline, in dependency order.
        execute(update_code)
        execute(update_virtualenv)
        execute(clear_services_dir)
        set_supervisor_config()
        if env.should_migrate:
            execute(migrate)
        execute(_do_collectstatic)
        execute(do_update_django_locales)
        execute(version_static)
        if env.should_migrate:
            # Flip ES aliases only after migrations have run.
            execute(flip_es_aliases)
    except Exception:
        execute(mail_admins, "Deploy failed", "You had better check the logs.")
        # hopefully bring the server back to life
        execute(services_restart)
        raise
    else:
        execute(services_restart)
        execute(record_successful_deploy)
def apache_reload():
    """reload Apache on remote host"""
    require('root', provided_by=('staging', 'preview', 'production'))
    # The init script path differs between the two supported distros.
    os_name = what_os()
    if os_name == 'redhat':
        sudo('/etc/init.d/httpd reload')
    elif os_name == 'ubuntu':
        sudo('/etc/init.d/apache2 reload')
def load_db(dumpfile=None):
    """Loads data from a SQL script to Pootle DB

    dumpfile -- path to a local SQL dump; it is uploaded and fed to mysql.
    """
    require('environment', provided_by=[production, staging])
    # Guard clauses replace the old deeply nested if/else pyramid.
    if dumpfile is None:
        print('\nERROR: A dumpfile must be provided. Aborting.')
        return
    if not isfile(dumpfile):
        print('\nERROR: The file "%s" does not exist. Aborting.' % dumpfile)
        return
    remote_filename = '%(project_path)s/DB_backup_to_load.sql' % env
    # "not exists or confirm" is equivalent to the previous
    # "(exists and confirm) or not exists" but queries the remote host
    # only once.
    if not exists(remote_filename) or confirm(
            '\n%s already exists. Do you want to overwrite it?'
            % remote_filename, default=False):
        print('\nLoading data into the DB...')
        with settings(hide('stderr')):
            put(dumpfile, remote_filename, use_sudo=True)
            # -p makes mysql prompt for the password interactively.
            sudo('mysql -u %s -p %s < %s' % (env['db_user'],
                                             env['db_name'],
                                             remote_filename))
    else:
        print('\nAborting.')
def set_supervisor_config():
    """Upload and link Supervisor configuration from the template."""
    require('environment', provided_by=('staging', 'preview', 'production',
                                        'india'))
    # Each helper uploads one supervisor section; run them in order.
    for conf_task in (set_celery_supervisorconf,
                      set_djangoapp_supervisorconf,
                      set_formsplayer_supervisorconf,
                      set_pillowtop_supervisorconf):
        execute(conf_task)
def update_code(branch="master"):
    """Updates the source code and its requirements"""
    require('environment', provided_by=[production, staging])
    # Check out the requested branch quietly, then refresh requirements.
    with settings(hide('stdout', 'stderr')):
        _checkout_repo(branch=branch)
        _update_requirements()
def setup_db():
    """Runs all the necessary steps to create the DB schema from scratch"""
    require("environment", provided_by=[production, staging])
    # Order matters: create tables, apply migrations, then seed data.
    for step in (syncdb, migratedb, initdb):
        step()
def migrate():
    """ run south migration on remote environment """
    require('code_root', provided_by=('production', 'demo', 'preview',
                                      'staging', "india"))
    # Management commands run with the virtualenv's python, in order.
    manage_commands = (
        'sync_finish_couchdb_hq',
        'syncdb --noinput',
        'migrate --noinput',
    )
    with cd(env.code_root):
        for command in manage_commands:
            sudo('%(virtualenv_root)s/bin/python manage.py ' % env + command,
                 user=env.sudo_user)
def run(self, revision):
    """Execute the full deployment pipeline for *revision*."""
    require('project_name', 'package_name', 'deploy_env', 'scm_repository')
    env.revision = revision
    # Each stage is announced with its label, then executed, in order.
    stages = (
        ('Pre deployment steps.', self.pre_deployment),
        ('Deployment steps.', self.deployment),
        ('Applying library updates.', self.update),
        ('Running post deployment checks.', self.check),
        ('Migrate any data.', self.migrate),
        ('Make this revision the current one.', self.make_current),
        ('Restart the application.', self.restart),
    )
    for label, stage in stages:
        self.step_label(label)
        stage(revision)
def dump_db(dumpfile="pathagarh_DB_backup.sql"):
    """Dumps the DB as a SQL script and downloads it"""
    require('environment', provided_by=[production, staging])
    # Guard-clause structure; each early return matches one of the old
    # else branches.
    if isdir(dumpfile):
        print("dumpfile '%s' is a directory! Aborting." % dumpfile)
        return
    # Ask before clobbering an existing local dump.
    if not (not isfile(dumpfile) or confirm(
            '\n%s already exists locally. Do you want to overwrite it?'
            % dumpfile, default=False)):
        print('\nAborting.')
        return
    remote_filename = '%s/%s' % (env['project_path'], dumpfile)
    # Ask before clobbering an existing remote dump.
    if not (not exists(remote_filename) or confirm(
            '\n%s already exists. Do you want to overwrite it?'
            % remote_filename, default=False)):
        print('\nAborting.')
        return
    print('\nDumping DB...')
    with settings(hide('stderr')):
        run('mysqldump -u %s %s %s > %s' % (env['db_user'],
                                            env['db_password_opt'],
                                            env['db_name'],
                                            remote_filename))
        get(remote_filename, '.')
        run('rm %s' % (remote_filename))
def dummy():
    """ Dummy task for testing """
    print(_yellow('>>> starting %s()' % _fn()))
    require('settings', provided_by=[vagrant])
    # Trivial remote command that proves connectivity and shows context.
    run('uname -a && hostname && pwd')
def maintenance_off():
    """ Turn maintenance mode off. """
    print(_yellow('>>> starting %s()' % _fn()))
    require('settings', provided_by=[vagrant])
    # Removing the marker file re-enables normal serving.
    run('rm -f %(repo_path)s/.upgrading' % env)
def create_db():
    """Creates a new DB"""
    require('environment', provided_by=[production, staging])
    # -p with no inline password makes mysql prompt interactively.
    with settings(hide('stderr')):
        sudo("mysql -u %(db_user)s -p -e 'CREATE DATABASE %(db_name)s "
             "DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci;'" % env)
def restart_services():
    """Restart the remote services after an explicit confirmation."""
    question = ('Are you sure you want to restart the services on '
                '{env.environment}?'.format(env=env))
    if not console.confirm(question, default=False):
        utils.abort('Task aborted.')
    require('root', provided_by=('staging', 'preview', 'production', 'india'))
    execute(services_restart)
def deploy_static():
    """Upload the built static assets to the remote static path."""
    # Warn (and allow bail-out) when deploying without a local build.
    if not os.path.exists(".build"):
        if not confirm("Looks like there is no build. Continue anyway? [DANGEROUS]"):
            abort("Aborting at user request. Type \"fab build\" before deployment.")
    require('hosts', provided_by=[staging, prod])
    require('staticpath', provided_by=[staging, prod])
    _put_dir('static', env.staticpath)
def dump_db(dumpfile="pootle_DB_backup.sql"):
    """Dumps the DB as a SQL script and downloads it"""
    require("environment", provided_by=[production, staging])
    if isdir(dumpfile):
        # A local directory of the same name would break the download.
        abort("dumpfile '%s' is a directory! Aborting." % dumpfile)
    elif not isfile(dumpfile) or confirm(
            "\n%s already exists locally. Do you want to overwrite it?"
            % dumpfile, default=False):
        remote_filename = "%s/%s" % (env["project_path"], dumpfile)
        if not exists(remote_filename) or confirm(
                "\n%s already exists. Do you want to overwrite it?"
                % remote_filename, default=False):
            print("\nDumping DB...")
            with settings(hide("stderr")):
                # db_password_opt presumably holds '-p<password>' or is
                # empty -- confirm against the env setup.
                run("mysqldump -u %s %s %s > %s"
                    % (env["db_user"], env["db_password_opt"],
                       env["db_name"], remote_filename))
                get(remote_filename, ".")
                # Remove the remote copy once downloaded.
                run("rm %s" % (remote_filename))
        else:
            abort("\nAborting.")
    else:
        abort("\nAborting.")
def mysql_create_tables():
    """
    Create the application tables.
    Assumes that the database was already created and an user was granted
    `create` privileges.
    """
    require('environment', provided_by=[production, staging])
    # Fail fast when the target database is missing entirely.
    exists = mysql_check_db_exists()
    if not exists:
        abort(colors.red("Unable to create tables in database '%(db_name)s'."
                         "The database does not exist" % env))
    # Refuse to run twice: an already-populated DB needs an explicit reset.
    total_tables = mysql_count_tables()
    if total_tables > 0:
        print(colors.red("The database already contains {} tables."
                         .format(total_tables)))
        sys.exit("If you need to re-create the tables please run: "
                 "\n\t fab {} mysql_reset_tables"
                 .format(env.environment))
    login_path = _mysql_login_path()
    # Apply schema and seed scripts in order.
    files = ['001/upgrade.sql', '002/upgrade.sql', '002/data.sql']
    with lcd('../db/'):
        for sql in files:
            cmd = ("mysql --login-path={} %(db_name)s < {}"
                   .format(login_path, sql) % env)
            local(cmd)
def _git_clone_tag(tag=None):
    """
    Clone a `slim` version of the code
    Note: if the tag was already deployed once we create a backup
    """
    require('environment', provided_by=[production, staging])
    url = env.project_repo
    if tag is None:
        print(colors.yellow(
            "No tag specified. Attempt to read the last tag from: {}"
            .format(url)))
        tag = git_tags(url=url, last_only=True)
    if not tag:
        abort(colors.red('\nPlease specify a valid tag.'))
    # Clone the code to src/v0.0.1`
    destination = ('%(project_path_src)s/v{}'.format(tag) % env)
    # --single-branch keeps the clone slim (only the tag's history).
    cmd = ('git clone -b {} --single-branch %(project_repo)s {}'
           .format(tag, destination) % env)
    if exists(destination):
        # Same tag deployed before: move the old tree to a dated backup.
        with cd(env.project_path_src):
            cmd_mv = 'mv v{} backup_`date "+%Y-%m-%d"`_v{}'.format(tag, tag)
            sudo(cmd_mv, user=env.server_user)
    sudo(cmd, user=env.server_user)
    _fix_perms(destination)
    with cd(env.project_path_src):
        # Create symlink
        sudo('ln -nsf {} current'.format(destination), user=env.server_user)
def _copy_db():
    """Copy the data in the source DB into the DB to use for deployment."""
    require('environment', provided_by=[production, staging])
    print('\n\nCloning DB...')
    # temp_dump is placed in settings so the env-formatted shell commands
    # below can reference it as %(temp_dump)s.
    with settings(hide('stderr'),
                  temp_dump='%(project_path)s/temporary_DB_backup.sql' % env):
        print('\nDumping DB data...')
        # Try the configured DB user first; if that fails (and the user is
        # not already root) retry with the MySQL root user.
        run("mysqldump -u %(db_user)s %(db_password_opt)s %(source_db)s > "
            "%(temp_dump)s"
            " || { test root = '%(db_user)s' && exit $?; "
            "echo 'Trying again, with MySQL root DB user'; "
            "mysqldump -u root %(db_root_password_opt)s %(source_db)s > "
            "%(temp_dump)s;}" % env)
        print('\nLoading data into the DB...')
        run("mysql -u %(db_user)s %(db_password_opt)s %(db_name)s < "
            "%(temp_dump)s"
            " || { test root = '%(db_user)s' && exit $?; "
            "echo 'Trying again, with MySQL root DB user'; "
            "mysql -u root %(db_root_password_opt)s %(db_name)s < "
            "%(temp_dump)s;}" % env)
        # Clean up the temporary dump.
        run('rm -f %(temp_dump)s' % env)
def mysql_conf():
    """
    Store mysql login credentials to the encrypted file ~/.mylogin.cnf
    Once created you can connect to the database without typing the password.
    Example:
        $ mysql_config_editor set --login-path=local --user=root --password \
            --host=localhost
        $ mysql --login-path=local
    For more details see:
        https://dev.mysql.com/doc/refman/5.6/en/mysql-config-editor.html
    """
    require('environment', provided_by=[production, staging])
    print("Storing the database credentials to ~/.mylogin.cnf")
    print(colors.yellow("⚠ Plese note that if you have a '#' in your password"
                        " then you have to specify the password in quotes."))
    # mysql_config_editor prompts for the password interactively; the
    # credentials are stored under a per-host login path.
    cmd = ("mysql_config_editor set "
           " --login-path=fabric_%(db_host)s "
           " --user=%(db_user)s "
           " --password "
           " --host=%(db_host)s" % env)
    local(cmd, capture=True)
def _copy_db():
    """Copies the data in the source DB into the DB to use for deployment"""
    require("environment", provided_by=[production, staging])
    # temp_dump is placed in settings so the env-formatted shell commands
    # below can reference it as %(temp_dump)s.
    with settings(hide("stderr"), temp_dump="/tmp/temporary_DB_backup.sql"):
        print("\nDumping DB data...")
        # Try the configured DB user first; if that fails (and the user is
        # not already root) retry with the MySQL root user.
        run(
            "mysqldump -u %(db_user)s %(db_password_opt)s %(source_db)s > "
            "%(temp_dump)s"
            " || { test root = '%(db_user)s' && exit $?; "
            "echo 'Trying again, with MySQL root DB user'; "
            "mysqldump -u root %(db_root_password_opt)s %(source_db)s > "
            "%(temp_dump)s;}" % env
        )
        print("\nLoading data into the DB...")
        run(
            "mysql -u %(db_user)s %(db_password_opt)s %(db_name)s < "
            "%(temp_dump)s"
            " || { test root = '%(db_user)s' && exit $?; "
            "echo 'Trying again, with MySQL root DB user'; "
            "mysql -u root %(db_root_password_opt)s %(db_name)s < "
            "%(temp_dump)s;}" % env
        )
        # Clean up the temporary dump.
        run("rm -f %(temp_dump)s" % env)
def load_db(dumpfile=None):
    """Loads data from a SQL script to Pootle DB"""
    require("environment", provided_by=[production, staging])
    if dumpfile is not None:
        if isfile(dumpfile):
            remote_filename = "%(project_path)s/DB_backup_to_load.sql" % env
            # Ask before overwriting a dump already on the server.
            if not exists(remote_filename) or confirm(
                "\n%s already exists. Do you want to overwrite it?"
                % remote_filename, default=False
            ):
                print("\nLoading data into the DB...")
                with settings(hide("stderr")):
                    put(dumpfile, remote_filename)
                    # db_password_opt presumably holds '-p<password>' or
                    # is empty -- confirm against the env setup.
                    run(
                        "mysql -u %s %s %s < %s"
                        % (env["db_user"], env["db_password_opt"],
                           env["db_name"], remote_filename)
                    )
                    # Remove the uploaded dump once loaded.
                    run("rm %s" % (remote_filename))
            else:
                abort("\nAborting.")
        else:
            abort('\nERROR: The file "%s" does not exist. Aborting.'
                  % dumpfile)
    else:
        abort("\nERROR: A (local) dumpfile must be provided. Aborting.")
def deploy():
    """Deploys the project site to the webservers """
    require('settings', provided_by=[production, staging])
    print
    print header("Deploying")
    deploy_path = env.filebase + '/' + env.branch
    # Only deploy into an existing, clean git checkout of the branch.
    if exists(deploy_path) and check_git(deploy_path):
        with cd(deploy_path):
            print 'Syncing files'
            sync_local_from_remote('origin', env.branch)
            print 'Syncing submodules'
            sync_submodules()
    else:
        print "There is not a branch with that name"
        exit()
    # On the staging server we need to setup special Apache rules
    if env.settings == 'staging':
        setup_staging(env.branch)
    # After everything is setup, clear the site's cache
    with cd(deploy_path):
        drupal.cache.clear()
    # Tell New Relic about our deploy
    newrelic.report_deploy(env.new_relic_api_key, env.new_relic_app_name)
def reset_local_db():
    """ Reset local database from remote host """
    require('code_root', provided_by=('production', 'staging'))
    # Never clone production data onto a developer machine.
    if env.environment == 'production':
        utils.abort('Local DB reset is for staging environment only')
    question = 'Are you sure you want to reset your local '\
               'database with the %(environment)s database?' % env
    sys.path.append('.')
    if not console.confirm(question, default=False):
        utils.abort('Local database reset aborted.')
    # loc/remote presumably are Django-style DATABASES dicts imported at
    # module level -- confirm where they come from.
    local_db = loc['default']['NAME']
    remote_db = remote['default']['NAME']
    with settings(warn_only=True):
        # warn_only tolerates dropping a DB that doesn't exist yet.
        local('dropdb %s' % local_db)
    local('createdb %s' % local_db)
    host = '%s@%s' % (env.user, env.hosts[0])
    # Stream the remote dump straight into the local DB over ssh.
    local('ssh -C %s sudo -u commcare-hq pg_dump -Ox %s | psql %s'
          % (host, remote_db, local_db))
def bootstrap():
    """Initialize remote host environment (virtualenv, deploy, update) """
    require('root', provided_by=('staging', 'production'))
    sudo('mkdir -p %(root)s' % env, shell=False, user=env.sudo_user)
    execute(clone_repo)
    # copy localsettings if it doesn't already exist in case any management
    # commands we want to run now would error otherwise
    with cd(env.code_root):
        # cp -n never overwrites an existing localsettings.py.
        sudo('cp -n localsettings.example.py localsettings.py',
             user=env.sudo_user)
    with cd(env.code_root_preindex):
        sudo('cp -n localsettings.example.py localsettings.py',
             user=env.sudo_user)
    update_code()
    execute(create_virtualenvs)
    execute(update_virtualenv)
    execute(setup_dirs)
    execute(update_apache_conf)
def bootstrap():
    """Creates initial directories and virtualenv"""
    require('environment', provided_by=[production, staging])
    # "not exists or confirm" is logically equivalent to the previous
    # "(exists and confirm) or not exists" but checks the remote path
    # only once.
    if (not exists('%(project_path)s' % env) or
            confirm('%(project_path)s already exists. Do you want to continue?'
                    % env, default=False)):
        print('Bootstrapping initial directories...')
        with settings(hide('stdout', 'stderr')):
            _init_directories()
            _init_virtualenv()
            _clone_repo()
            _checkout_repo()
            _install_requirements()
    else:
        print('Aborting.')
def bootstrap(tag='master'):
    """Bootstrap the deployment using the specified branch"""
    require('environment', provided_by=[production, staging])
    # Show the environment-specific banner so the operator knows which
    # environment this is about to touch.
    print(MOTD_PROD if _is_prod() else MOTD_STAG)
    msg = colors.red('\n%(project_path)s exists. '
                     'Do you want to continue anyway?' % env)
    if (not exists('%(project_path)s' % env)
            or confirm(msg, default=False)):
        with settings(hide('stdout', 'stderr')):
            _init_directories()
            _init_virtualenv()
            _git_clone_tag(tag=tag)
            _install_requirements()
            update_config(tag=tag)  # upload new config files
            enable_site()
    else:
        sys.exit('\nAborting.')
def mysql_conf():
    """Sets up .my.cnf file for passwordless MySQL operation"""
    require('environment', provided_by=[production, staging])
    print('\n\nSetting up MySQL password configuration...')
    conf_filename = '~/.my.cnf'
    # Only proceed when the file is absent or the user agrees to replace it.
    overwrite_ok = (not exists(conf_filename) or
                    confirm('\n%s already exists. Do you want to overwrite it?'
                            % conf_filename, default=False))
    if not overwrite_ok:
        abort('\nAborting.')
    with settings(hide('stdout', 'stderr')):
        upload_template('deploy/my.cnf', conf_filename, context=env)
        # MySQL refuses to use world-readable credential files.
        run('chmod 600 %s' % conf_filename)
def mysql_conf_test():
    """ Check if a configuration was created for the host"""
    require('environment', provided_by=[production, staging])
    from subprocess import Popen, PIPE
    login_path = _mysql_login_path()
    # BUG FIX: the old code applied "% env" to a format string with no
    # %-placeholders -- a no-op at best, and a crash if the login path
    # ever contained a literal '%'.
    cmd = ("mysql_config_editor print --login-path={} 2> /dev/null"
           .format(login_path))
    proc = Popen(cmd, shell=True, stdout=PIPE)
    (out, err) = proc.communicate()
    # print("Checking mysql login path: {}".format(login_path))
    # bool(out) is equivalent to the old ("" != out) on Python 2 and,
    # unlike it, also works when communicate() returns bytes (Python 3).
    has_config = bool(out)
    if not has_config:
        print("There are no mysql credentials stored in ~/.mylogin.cnf file."
              " Please store the database credentials by running: \n\t"
              " fab {} mysql_conf".format(env.environment))
        sys.exit('\nAborting.')
def stage_feature():
    """Deploys a Pootle server for testing a feature branch.

    This copies the DB from a previous Pootle deployment.
    """
    require('environment', provided_by=[staging])
    # Stand up a fresh instance, then seed it from the previous staging DB.
    bootstrap()
    create_db()
    _copy_db()
    # Upload custom settings before calling the update_db() command.
    update_config()
    update_db()
    deploy_static()
    enable_site()
    print('\n\nSuccessfully deployed at:\n\n\thttp://%(project_url)s\n' % env)
def update():
    """
    In a development environment, update all develop branches.
    """
    require('environment', provided_by=[dev, prod])
    if env.environment != 'development':
        # BUG FIX: the old implicit string concatenation produced
        # "...in order toupdate..." (missing space between the literals).
        raise Exception('must be in a development environment in order to '
                        'update develop branches.')
    with lcd('%(doc_root)s/dependencies' % env):
        for git_dir, opts in GIT_INSTALL.items():
            env.git_repo = git_dir
            # Only update repos that exist locally, are marked for
            # development use, and are not pinned to a fixed checkout.
            if (_exists('%(doc_root)s/dependencies/%(git_repo)s' % env)
                    and 'development' in opts
                    and 'checkout' not in opts):
                with lcd(git_dir):
                    print('Updating git repo: %(git_repo)s' % env)
                    local('git pull --ff')
def run_backup():
    """Run backup script on remote"""
    require('environment', provided_by=[stage])
    # Make sure the local download directory exists first.
    if not os.path.exists(env['project']['project_backup_path']):
        local('mkdir %(project_backup_path)s' % env['project'])
    with cd('%(application_shared_path)s' % env['application']), shell_env(
            COMPOSE_PROJECT_NAME='%(application_name)s' % env['application']):
        # Trigger the application's own maintenance/backup command.
        run('%(application_maintenance_backup_command)s' % env['application'])
    with cd('%(application_tmp_path)s' % env['application']):
        # Copy the produced dump out of the backup volume into tmp, owned
        # by the connecting user so it can be downloaded afterwards.
        run('install --owner {user} {src} {dest}'.format(
            user=env['user'],
            src=
            '%(application_backup_volume_path)s/%(application_backup_filename)s'
            % env['application'],
            dest='%(application_tmp_path)s/' % env['application']))
    execute(download)
def clean():
    """
    In a development environment, remove all installed packages and symlinks.
    """
    require('environment', provided_by=[dev, prod])
    if env.environment != 'development':
        abort('Must be in a development environment.')
    with lcd('%(doc_root)s' % env):
        gitcmd = 'git clean -%sdx -e \!settings.py'
        # Dry-run first so the user can see what would be deleted.
        print('Files to be removed:')
        local(gitcmd % 'n')
        if not confirm('Are you certain you would like to remove these files?'):
            abort('Aborting clean.')
        local(gitcmd % 'f')
def update_config():
    """Updates server configuration files"""
    require('environment', provided_by=[production, staging])
    with settings(hide('stdout', 'stderr')):
        # Configure VirtualHost
        upload_template('deploy/%(environment)s/virtualhost.conf' % env,
                        env.vhost_file, context=env, use_sudo=True)
        # Configure WSGI application
        upload_template('deploy/pootle.wsgi', env.wsgi_file, context=env)
        # Configure and install settings
        upload_template('deploy/%(environment)s/settings.conf' % env,
                        '%(project_settings_path)s/90-%(environment)s-local.conf'
                        % env, context=env)
def link_webserver_conf(maintenance=False):
    """link the webserver conf file

    maintenance -- when True, only the include file is removed so the
    maintenance configuration takes over instead of the normal include.
    """
    require('vcs_root_dir', provided_by=env.valid_envs)
    if env.webserver is None:
        return
    # TODO: if you want to deploy this separate to opencontracting then
    # you need to uncomment various lines below
    # create paths in the vcs checkout
    vcs_config_stub = path.join(env.vcs_root_dir, env.webserver,
                                env.environment)
    vcs_config_live = vcs_config_stub + '.conf'
    vcs_config_include = vcs_config_stub + '_include.conf'
    # create paths in the webserver config
    webserver_conf = _webserver_conf_path()
    webserver_include = _webserver_include_path()
    # ensure the includes dir exists
    webserver_include_dir = '/etc/apache2/sites-available/includes'
    fablib._create_dir_if_not_exists(webserver_include_dir)
    # ensure the main file is linked properly
    if not files.exists(vcs_config_live):
        utils.abort('No %s conf file found - expected %s' %
                    (env.webserver, vcs_config_live))
    fablib._delete_file(webserver_conf)
    fablib._link_files(vcs_config_live, webserver_conf)
    # now manage the include file
    if maintenance:
        fablib._delete_file(webserver_include)
    else:
        if not files.exists(vcs_config_include):
            utils.abort('No %s conf file found - expected %s' %
                        (env.webserver, vcs_config_include))
        fablib._delete_file(webserver_include)
        fablib._link_files(vcs_config_include, webserver_include)
    # debian has sites-available/sites-enabled split with links
    if fablib._linux_type() == 'debian':
        webserver_conf_enabled = webserver_conf.replace('available',
                                                        'enabled')
        fablib._link_files(webserver_conf, webserver_conf_enabled)
    # Syntax-check the webserver config before anyone reloads it.
    fablib.webserver_configtest()
def delete_old_rollback_versions(keep=None):
    """Delete old rollback directories, keeping the last "keep" (default 5)"."""
    require('prev_root', provided_by=env.valid_envs)
    # the -1 argument ensures one directory per line
    prev_versions = run('ls -1 ' + env.prev_root).split('\n')
    if keep is None:
        keep = env.versions_to_keep if 'versions_to_keep' in env else 5
    else:
        keep = int(keep)
    # keep == 0 means "do not delete anything".
    if keep == 0:
        return
    # Everything except the trailing `keep` entries gets removed.
    doomed = prev_versions[:-1 * int(keep)]
    for version_to_delete in doomed:
        sudo_or_run('rm -rf ' + path.join(env.prev_root,
                                          version_to_delete.strip()))
def stage_feature(branch=None, repo='git://github.com/translate/pootle.git'):
    """Deploys a Pootle server for testing a feature branch.

    This copies the DB from a previous Pootle deployment.
    """
    require('environment', provided_by=[staging])
    # Rebuild env settings from the requested branch/repo before deploying.
    _reload_with_new_settings(branch, repo)
    # Bring up a new instance seeded from the previous staging DB.
    bootstrap(branch)
    create_db()
    _copy_db()
    deploy_static()
    install_site()
    print('\n\nSuccessfully deployed at:\n\n\thttp://%(project_url)s\n' % env)
def version():
    """ return the deployed VCS revision and commit comments"""
    require('server_project_home', 'repo_type', 'vcs_root_dir', 'repository',
            provided_by=env.valid_envs)
    if env.repo_type == "git":
        with cd(env.vcs_root_dir):
            sudo_or_run('git log | head -5')
    elif env.repo_type == "svn":
        _get_svn_user_and_pass()
        with cd(env.vcs_root_dir):
            # hide('running') keeps the svn password out of fabric's echo
            # of the command line.
            with hide('running'):
                cmd = 'svn log --non-interactive --username %s --password %s | head -4' % (
                    env.svnuser, env.svnpass)
                sudo_or_run(cmd)
    else:
        utils.abort('Unsupported repo type: %s' % (env.repo_type))
def what_os():
    """Detect (and cache, per host) the remote OS: 'ubuntu' or 'redhat'."""
    with settings(warn_only=True):
        require('environment', provided_by=('staging','production'))
        if getattr(env, 'host_os_map', None) is None:
            # prior use case of setting a env.remote_os did not work when
            # doing multiple hosts with different os! Need to keep state
            # per host!
            env.host_os_map = defaultdict(lambda: '')
        # Empty string means "not probed yet" for this host.
        if env.host_os_map[env.host_string] == '':
            print 'Testing operating system type...'
            if(files.exists('/etc/lsb-release',verbose=True) and
                    files.contains(text='DISTRIB_ID=Ubuntu',
                                   filename='/etc/lsb-release')):
                remote_os = 'ubuntu'
                print 'Found lsb-release and contains "DISTRIB_ID=Ubuntu", this is an Ubuntu System.'
            elif(files.exists('/etc/redhat-release',verbose=True)):
                remote_os = 'redhat'
                print 'Found /etc/redhat-release, this is a RedHat system.'
            else:
                print 'System OS not recognized! Aborting.'
                exit()
            env.host_os_map[env.host_string] = remote_os
        return env.host_os_map[env.host_string]
def deploy():
    """ deploy code to remote host by checking out the latest via git """
    # Two confirmations before touching the remote environment.
    if not console.confirm('Are you sure you want to deploy {env.environment}?'.format(env=env), default=False) or \
       not console.confirm('Did you run "fab {env.environment} preindex_views"? '.format(env=env), default=False):
        utils.abort('Deployment aborted.')
    require('root', provided_by=('staging', 'production', 'india'))
    run('echo ping!')  # hack/workaround for delayed console response
    try:
        # Deploy pipeline, in dependency order.
        execute(update_code)
        execute(update_env)
        execute(clear_services_dir)
        upload_and_set_supervisor_config()
        execute(migrate)
        execute(_do_collectstatic)
        execute(version_static)
    finally:
        # hopefully bring the server back to life if anything goes wrong
        execute(services_restart)
def new_release(version, debug='no'):
    """ GitFlow new release """
    require('stage', provided_by=(stable, development))
    require('settings', provided_by=(stable, development))
    # Set env.
    env.user = env.settings['user']
    env.host_string = env.settings['host']
    # Quiet all fabric output unless debugging was requested.
    _hide = () if debug != 'no' else ('stderr', 'stdout', 'warnings',
                                      'running')
    with hide(*_hide):
        local('git flow release start %s' % (version, ), capture=True)
        local('git commit -am \'Bumped version to %s\'' % (version, ))
        local('git flow release finish -m %s -p %s' % (version, version),
              capture=True)
def drop_deployment():
    """Wipe out a Pootle deployment.

    The deployment might have not been completed.
    """
    require('environment', provided_by=[production, staging])
    # BUG FIX: the bare "except:" clauses also swallowed SystemExit and
    # KeyboardInterrupt, so a fabric abort() (or Ctrl-C) inside the
    # helpers was silently ignored; catch only ordinary exceptions.
    try:
        disable_site()
    except Exception:
        print('\n\nSeems that the site was not enabled on Apache.')
    try:
        drop_db()
    except Exception:
        print("\n\nSeems that database didn't exist.")
    _remove_config()
    _remove_directories()
    print('\n\nRemoved Pootle deployment for http://%(project_url)s' % env)
def bootstrap(branch="master"):
    """Bootstraps a Pootle deployment using the specified branch"""
    require('environment', provided_by=[ubuntu, fedora])
    proceed = (not exists('%(project_path)s' % env) or confirm(
        '\n%(project_path)s already exists. Do you want to continue?' % env,
        default=False))
    if not proceed:
        print('Aborting.')
        return
    print('Bootstrapping initial directories...')
    with settings(hide('stdout', 'stderr')):
        _init_directories()
        _init_virtualenv()
        _clone_repo()
        # Presumably makes the checkout's parent importable as a package
        # -- confirm why this __init__.py is needed.
        run("touch %(project_repo_path)s/../__init__.py" % env)
        _checkout_repo(branch=branch)
        with prefix('source %(env_path)s/bin/activate' % env):
            _install_requirements()
def create_db():
    """Create a new DB."""
    require('environment', provided_by=[production, staging])
    create_db_cmd = ("CREATE DATABASE `%(db_name)s` "
                     "DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci;"
                     % env)
    grant_db_cmd = ("GRANT ALL PRIVILEGES ON `%(db_name)s`.* TO `%(db_user)s`"
                    "@localhost IDENTIFIED BY \"%(db_password)s\"; "
                    "FLUSH PRIVILEGES;" % env)
    print('\n\nCreating DB...')
    with settings(hide('stderr')):
        # Try as the configured DB user first; if that fails (and the user
        # is not already root) retry as root, additionally granting
        # privileges to the configured user.
        run(("mysql -u %(db_user)s %(db_password_opt)s -e '" % env) +
            create_db_cmd +
            ("' || { test root = '%(db_user)s' && exit $?; " % env) +
            "echo 'Trying again, with MySQL root DB user'; " +
            ("mysql -u root %(db_root_password_opt)s -e '" % env) +
            create_db_cmd + grant_db_cmd + "';}")
def env_common():
    """Build env.roledefs and host lists from the inventory file."""
    require('inventory', 'environment')
    servers = read_inventory_file(env.inventory)
    _setup_path()
    proxy = servers['proxy']
    webworkers = servers['webworkers']
    postgresql = servers['postgresql']
    couchdb = servers['couchdb']
    touchforms = servers['touchforms']
    elasticsearch = servers['elasticsearch']
    celery = servers['celery']
    rabbitmq = servers['rabbitmq']
    # if no server specified, just don't run pillowtop
    pillowtop = servers.get('pillowtop', [])
    # Deploy runs on a single host: the designated one, or the first
    # postgresql host as a fallback.
    deploy = servers.get('deploy', servers['postgresql'])[:1]
    env.roledefs = {
        'couch': couchdb,
        'pg': postgresql,
        'rabbitmq': rabbitmq,
        'django_celery': celery,
        'sms_queue': celery,
        'reminder_queue': celery,
        'pillow_retry_queue': celery,
        'django_app': webworkers,
        'django_pillowtop': pillowtop,
        'formsplayer': touchforms,
        'staticfiles': proxy,
        'lb': [],
        # having deploy here makes it so that
        # we don't get prompted for a host or run deploy too many times
        'deploy': deploy,
        # fab complains if this doesn't exist
        'django_monolith': [],
    }
    env.roles = ['deploy']
    env.hosts = env.roledefs['deploy']
    env.supervisor_roles = ROLES_ALL_SRC
def update_virtualenv(preindex=False):
    """ update external dependencies on remote host

    assumes you've done a code update"""
    require('code_root', provided_by=('staging', 'production', 'india'))
    # The preindex deploy uses its own checkout and virtualenv.
    if preindex:
        root_to_use = env.code_root_preindex
        env_to_use = env.virtualenv_root_preindex
    else:
        root_to_use = env.code_root
        env_to_use = env.virtualenv_root
    requirements = posixpath.join(root_to_use, 'requirements')
    req_files = ('prod-requirements.txt', 'requirements.txt')
    with cd(root_to_use):
        # Activate the virtualenv, then install both requirement files in
        # a single pip invocation.
        parts = ['source %s/bin/activate && pip install' % env_to_use]
        parts.extend('--requirement %s' % posixpath.join(requirements, name)
                     for name in req_files)
        sudo(' '.join(parts), user=env.sudo_user)
def stage_deploy():
    """Deploy the staging branch: refresh the checkout, rebuild and
    restart the docker-compose stack, and run Django housekeeping."""
    stage_set('staging')
    require('stage', provided_by=(production, staging))
    # Every remote step, executed in order inside the project root with
    # COMPOSE_FILE / COMPOSE_HTTP_TIMEOUT exported.
    steps = (
        'git fetch origin %s' % env.branch,
        'git checkout %s' % env.branch,
        'git pull origin %s' % env.branch,
        'docker-compose down',
        'docker-compose up -d',
        'docker-compose exec backend pip install -r requirements/staging.txt',
        'docker-compose exec backend python manage.py migrate',
        'docker-compose exec backend python manage.py collectstatic --noinput',
        'docker-compose exec nodejs9 npm i',
        'docker-compose -f %s build' % env.docker_file,
        'docker-compose down',
        'docker-compose -f %s down' % env.docker_file,
        'docker-compose -f %s up -d' % env.docker_file,
    )
    with shell_env(COMPOSE_FILE=env.docker_file, COMPOSE_HTTP_TIMEOUT='300'):
        with cd(env.root_dir):
            for step in steps:
                run(step)
def reup(branch=None, nobuild=False):
    """Pull the given branch (default: env.default_branch) and bring the
    docker-compose stack back up, rebuilding images unless nobuild."""
    require('stage', provided_by=(staging, production))
    if exists_local('.env'):
        move_env_file(env.user)
    if not branch:
        branch = env.default_branch
    with cd('/home/{}/{}'.format(env.user, ocean)):
        with prefix(". .env"):
            run('git fetch')
            run('git checkout {}'.format(branch))
            run('git pull')
            with shell_env(DJANGO_SETTINGS_MODULE=env.DJANGO_SETTINGS_MODULE):
                # skip image rebuild when nobuild was requested
                compose_cmd = ('docker-compose -f {} up -d' if nobuild
                               else 'docker-compose -f {} up --build -d')
                sudo(compose_cmd.format(env.docker_compose_file))
def get_db_dump(commit=None):
    # Download a Postgres dump from the remote host.  With ``commit``,
    # fetch the existing per-commit dump; without it, create a fresh
    # manual dump via pg_dump inside the db container and download that.
    require('stage', provided_by=(staging, production))
    if commit:
        with cd('/home/{}/{}/docker/postgresql/dumps'.format(env.user,
                                                             ocean)):
            # NOTE(review): if <commit>.sql does not exist remotely this
            # branch silently does nothing — confirm that is intended.
            if exists('{}.sql'.format(commit)):
                get('{}.sql'.format(commit), '%(basename)s')
    else:
        # NOTE(review): indentation was lost in this file; this `else`
        # is assumed to pair with `if commit:` (no commit -> fresh dump).
        print(
            'We are going to make a new database dump and download it for you')
        time_stamp = datetime.now().strftime('%Y_%m_%d__%H_%M_%S')
        with cd('/home/{}/{}'.format(env.user, ocean)):
            with prefix(". .env"):
                # pg_dump in custom format (-F c); credentials come from
                # the remote .env via the ${POSTGRES_*} shell expansions
                # (doubled braces keep them out of str.format).
                run('docker-compose -f {0} exec -T'
                    ' db pg_dump -d ${{POSTGRES_DB}} -U ${{POSTGRES_USER}} -h localhost -F c >'
                    ' ./docker/postgresql/dumps/manual_dump_{1}.sql'.format(
                        env.docker_compose_file, time_stamp))
        with cd('/home/{}/{}/docker/postgresql/dumps'.format(env.user,
                                                             ocean)):
            if exists('manual_dump_{}.sql'.format(time_stamp)):
                get('manual_dump_{}.sql'.format(time_stamp), '%(dirname)s')
def send_file(upfile=None):
    """Upload a local file into the remote project directory.

    Aborts when no file is given, when the local file is missing, or
    when the user declines to overwrite an existing remote copy.
    """
    require('environment', provided_by=[staging])
    print('\n\nSending file...')
    # Guard clauses: abort() raises, so each failure path stops here.
    if upfile is None:
        abort('\nERROR: A (local) file must be provided. Aborting.')
    if not isfile(upfile):
        abort('\nERROR: The file "%s" does not exist. Aborting.' % upfile)
    remote_filename = '%s/%s' % (env['project_path'], upfile)
    # Only prompt when the remote file already exists.
    if exists(remote_filename) and not confirm(
            '\n%s already exists. Do you want to overwrite it?'
            % remote_filename, default=False):
        abort('\nAborting.')
    with settings(hide('stderr')):
        put(upfile, remote_filename)
def set_up_celery_daemon():
    """Install init.d scripts and /etc/default config for celerybeat and
    celeryd (suffixed with the project name), then restart each service."""
    require('vcs_root_dir', 'project_name', provided_by=env)
    for daemon in ('celerybeat', 'celeryd'):
        service = daemon + '_' + env.project_name
        # source files live under <vcs_root_dir>/celery/{init,config}/<daemon>
        init_src = path.join(env['vcs_root_dir'], 'celery', 'init', daemon)
        init_dst = path.join('/etc', 'init.d', service)
        config_src = path.join(env['vcs_root_dir'], 'celery', 'config',
                               daemon)
        config_dst = path.join('/etc', 'default', service)
        sudo_or_run(" ".join(('cp', init_src, init_dst)))
        sudo_or_run(" ".join(('chmod', '+x', init_dst)))
        sudo_or_run(" ".join(('cp', config_src, config_dst)))
        sudo_or_run('/etc/init.d/%s restart' % service)
def install_supervisor():
    """Install supervisord system-wide and register it as an init service.

    Installs via pip (outside the virtualenv so it can run system-wide),
    generates /etc/supervisord.conf, points its [include] section at
    env.supervisor_conf_root, installs the init script, enables it for
    the detected OS (ubuntu/redhat), starts it, and runs 'update'.
    """
    require('environment', 'project_root', 'virtualenv_root', 'sudo_user',
            provided_by='setup_env')
    # we don't install supervisor in the virtualenv since we want it to
    # be able to run systemwide.
    # (Fixed: the previous code applied a pointless `% env` to strings
    # with no format placeholders.)
    sudo('pip install supervisor', pty=True, shell=True)
    # create the standard conf file
    sudo('echo_supervisord_conf > /tmp/supervisord.conf')
    sudo('mv /tmp/supervisord.conf /etc/supervisord.conf')
    # uncomment the include directive in supervisord.conf so we can point
    # it to our supervisor conf.  Raw string: the pattern is the regex
    # \;\[include\] (previously written with an invalid '\;' escape in a
    # non-raw literal).
    uncomment('/etc/supervisord.conf', r'\;\[include\]', use_sudo=True,
              char=';', backup='.bak')
    sudo("echo 'files = %(supervisor_conf_root)s/*.conf'"
         " >> /etc/supervisord.conf" % env)
    # install the init script with root ownership and exec permission
    init_temp_path = '/tmp/supervisor_init.tmp'
    put(env.supervisor_init_path, init_temp_path)
    sudo('chown root %s' % init_temp_path)
    sudo('chgrp root %s' % init_temp_path)
    sudo('chmod +x %s' % init_temp_path)
    sudo('mv %s /etc/init.d/supervisord' % init_temp_path)
    sudo('chmod +x /etc/init.d/supervisord')
    # register with the OS's service manager
    if env.os == 'ubuntu':
        sudo('update-rc.d supervisord defaults')
    elif env.os == 'redhat':
        sudo('chkconfig --add supervisord')
    sudo('service supervisord start')
    # update supervisor instance
    _supervisor_command('update')
def deploy(tests='yes'):
    ''' Deploys project to previously set stage. '''
    require('stage', provided_by=(stable, development))
    require('settings', provided_by=(stable, development))
    # Point Fabric at the stage's host/user.
    stage_cfg = env.settings
    env.user = stage_cfg['user']
    env.host_string = stage_cfg['host']
    with hide('stderr', 'stdout', 'warnings', 'running'):
        # Run the local test suite first unless explicitly skipped.
        if tests == 'yes':
            with lcd(project_settings['local']['code_src_directory']):
                run_tests()
        with cd(stage_cfg['code_src_directory']):
            pull_repository()
        # Remaining steps need the stage's virtualenv activated.
        with virtualenv(stage_cfg['venv_directory']):
            with cd(stage_cfg['code_src_directory']):
                collect_static()
                install_requirements()
                migrate_models()
                restart_application()