def build_js_dist():
    """Run npm install and grunt build in the site's basedir."""
    if not exists(config('basedir')):
        print("Directory %s doesn't exist yet" % config('basedir'))
        print("Run fabric with the 'create_srv_dir' command.")
        return  # Nothing to build in a missing checkout.
    with cd(config('basedir')):
        sudo('npm install', user='******')
        sudo('grunt build', user='******')

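# The tasks in this file rely on project-specific helpers defined elsewhere
# in the fabfile: config(), production_config(), is_production_database(),
# can_copy_database(), can_copy_from_production(),
# get_data_from_production_server(), reload_nginx() and
# initial_checkout_cmd(). As a minimal, hypothetical sketch (names and values
# are illustrative, not the real implementation), config() behaves like a
# lookup into per-deployment-target settings:
#
#     SETTINGS = {
#         'basedir': '/srv/example.org',   # checkout directory under /srv
#         'domainname': 'example.org',
#         'deployment_type': 'staging',    # or 'development'
#         'checkout': 'tag',               # or 'trunk'
#         'databases': {...},              # Django-style DATABASES dict
#     }
#
#     def config(key):
#         return SETTINGS[key]
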
def solr_update_index():
    print('SOLR: updating index')
    if not exists(config('basedir')):
        abort("Directory %s doesn't exist yet. Run 'create_srv_dir'." %
              config('basedir'))
    with cd(config('basedir')):
        if confirm("Are you sure you want to update the index?"):
            sudo("bin/django update_index", user='******')

def solr_rebuild_index():
    print('SOLR: rebuilding index')
    if not exists(config('basedir')):
        abort("Directory %s doesn't exist yet. Run 'create_srv_dir'." %
              config('basedir'))
    with cd(config('basedir')):
        if confirm("Rebuilding (instead of updating) the index, "
                   "are you sure?"):
            sudo("bin/django rebuild_index", user='******')

def initial_symlink_django_logrotate():
    """Install the logrotate script for this site's django.log."""
    logrotates = [filename for filename in os.listdir('etc/')
                  if filename.endswith('.logrotate')]
    for logrotate in logrotates:
        source = os.path.join(config('basedir'), 'etc', logrotate)
        # Note: the target name is fixed, so if there are multiple
        # .logrotate files, the last one processed wins.
        target = os.path.join('/etc/logrotate.d/',
                              config('domainname') + '.logrotate')
        sudo("rm -f %s" % target)
        sudo("ln -s %s %s" % (source, target))

def solr_build_schema():
    print('SOLR: building schema')
    if not exists(config('basedir')):
        abort("Directory %s doesn't exist yet. Run 'create_srv_dir'." %
              config('basedir'))
    with cd(config('basedir')):
        sudo("bin/django build_solr_schema > "
             "{basedir}/etc/solr/conf/schema.xml".format(
                 basedir=config('basedir')),
             user='******')

def sync_and_migrate():
    """Calls bin/django syncdb and migrate."""
    if confirm("Sync and migrate the database?", default=True):
        if config('deployment_type') == 'development':
            # Run the commands locally.
            local("bin/django syncdb")
            local("bin/django migrate")
        else:
            with cd(config('basedir')):
                sudo("bin/django syncdb", user='******')
                sudo("bin/django migrate", user='******')

def solr_link():
    print('SOLR: linking the generated xml file')
    sudo("ln -s {basedir}/etc/solr.xml "
         "/etc/tomcat6/Catalina/localhost/solr.xml".format(
             basedir=config('basedir')),
         user='******')

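# Note: the intended order is presumably solr_build_schema (generate
# schema.xml), solr_link (once, to hook the config into tomcat6), and then
# solr_rebuild_index or solr_update_index; the tasks themselves don't
# enforce this.
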
def initial_create_databases():
    """Create databases and/or db users."""
    databases = config('databases')
    # TODO: Currently we only do the 'default' database.
    # TODO: And we don't copy it from production to staging yet.
    for dbname in databases:
        if dbname != 'default':
            print("Skipped database '%s'." % (dbname,))
            continue
        dbinfo = databases[dbname]
        if is_production_database(dbname):
            warning = "Database '{name}' on '{host}' is a production database."
            print(red(warning.format(name=dbinfo['NAME'],
                                     host=dbinfo['HOST'])))
        if confirm("Create user %s on %s?" % (dbinfo['USER'], dbinfo['HOST']),
                   default=False):
            print(green("You will be prompted for a password. "
                        "The site wants to use %s ." %
                        (dbinfo['PASSWORD'],)))
            print(green("\nIf it asks for another password, then that is "
                        "postgres'\npassword. On the staging server, that "
                        "is 'postgres'.\nIt might be different elsewhere.\n"))
            # Use warn_only so that the script doesn't halt if the user
            # exists already.
            with settings(warn_only=True):
                local('createuser -h {host} -U postgres --pwprompt {user}'
                      .format(host=dbinfo['HOST'], user=dbinfo['USER']))
        create_individual_database(dbname)

def create_individual_database(dbname):
    """Create a single database.

    Used by initial_create_databases and copy_databases."""
    dbinfo = config('databases')[dbname]
    if 'postgis' in dbinfo['ENGINE']:
        if confirm("Create database %s on %s with template postgis?" % (
                dbinfo['NAME'], dbinfo['HOST']), default=False):
            print(green("The password required is that of user 'postgres'. "
                        "Often equal to 'postgres'."))
            cmd = ('createdb -h {host} -U postgres '
                   '--template=template_postgis --owner={user} {database}')
            # Use warn_only so that the script doesn't halt if the db
            # exists already.
            with settings(warn_only=True):
                local(cmd.format(host=dbinfo['HOST'],
                                 user=dbinfo['USER'],
                                 database=dbinfo['NAME']))
    else:
        if confirm("Create database %s on %s?" % (
                dbinfo['NAME'], dbinfo['HOST']), default=False):
            print(green("The password required is that of user 'postgres'. "
                        "Often equal to 'postgres'."))
            cmd = ('createdb -h {host} -U postgres '
                   '--owner={user} {database}')
            # Use warn_only so that the script doesn't halt if the db
            # exists already.
            with settings(warn_only=True):
                local(cmd.format(host=dbinfo['HOST'],
                                 user=dbinfo['USER'],
                                 database=dbinfo['NAME']))

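# For reference: config('databases') is assumed to return a Django-style
# DATABASES dict. A hypothetical 'default' entry, with illustrative values:
#
#     {'default': {
#         'ENGINE': 'django.contrib.gis.db.backends.postgis',
#         'NAME': 'mysite',
#         'USER': 'mysite',
#         'PASSWORD': 'secret',
#         'HOST': 'db.example.org',
#     }}
#
# create_individual_database() only checks for the substring 'postgis' in
# ENGINE to decide whether to create from the template_postgis template.
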
def upload_data(filename):
    """Upload local media.tgz to var/ and extract it."""
    base = "%s/var" % config('basedir')
    put(filename, base, use_sudo=True)
    with cd(base):
        sudo("tar xzf %s" % os.path.basename(filename), user='******')
        sudo("rm %s" % os.path.basename(filename))

def initial_nginx_symlinks():
    """Make nginx symlinks into /etc as root."""
    filename = config('domainname') + '.nginx.conf'
    source = os.path.join(config('basedir'), 'etc', filename)
    print("Linking %s" % source)
    target_available = os.path.join('/etc/nginx/sites-available/', filename)
    target_enabled = os.path.join('/etc/nginx/sites-enabled/', filename)
    sudo("rm -f %s" % target_available)
    sudo("ln -s %s %s" % (source, target_available))
    sudo("rm -f %s" % target_enabled)
    sudo("ln -s %s %s" % (target_available, target_enabled))
    if confirm("Reload nginx config?", default=True):
        reload_nginx()

def start_or_restart():
    """(Re)starts supervisord, celery and gunicorn."""
    with cd(config('basedir')):
        output = sudo("bin/supervisorctl status gunicorn", user='******')
        if 'refused' in output:
            # supervisorctl couldn't connect: the daemon isn't up yet.
            print("Supervisor not running yet, starting it.")
            sudo("bin/supervisord", user='******')
        else:
            with settings(warn_only=True):
                print("Restarting all supervisor-controlled processes...")
                sudo("bin/supervisorctl restart all", user='******')

def copy_databases(drop_and_create_db=True):
    """Runs copy_individual_database for each applicable database."""
    for dbname in config('databases'):
        # TODO: Ignore databases other than 'default' for now.
        if dbname != 'default':
            print("Skipped database '%s'." % (dbname,))
            continue
        if can_copy_database(dbname):
            copy_individual_database(dbname, drop_and_create_db)
        else:
            print("Can't copy database '%s'." % dbname)

def copy_var_data_from_production():
    """Copies media from var/ on the production server to the staging
    or development environment."""
    if can_copy_from_production():
        if confirm("Copy var/ data from production?", default=False):
            filename = get_data_from_production_server()
            if config('deployment_type') == 'development':
                local("cd var && tar xzf %s" % filename)
            else:
                upload_data(filename)
            local("rm %s" % filename)
    else:
        print("Can't copy. Source and destination the same?")

def drop_database(dbname='default'):
    """Drop database."""
    db = config('databases').get(dbname, None)
    if db is None:
        return
    if is_production_database(dbname):
        print(red("Database '{name}' on '{host}' is a production database!"
                  .format(name=db['NAME'], host=db['HOST'])))
    if confirm("Drop database %s on %s?" % (db['NAME'], db['HOST']),
               default=False):
        cmd = 'dropdb -h {host} -U postgres {database}'
        # Warn only so we don't abort if it doesn't exist yet.
        with settings(warn_only=True):
            local(cmd.format(host=db['HOST'], database=db['NAME']))

def copy_individual_database(dbname='default', drop_and_create_db=True):
    """Copies the production database(s) to the staging environment."""
    if not can_copy_database(dbname):
        # Doublecheck in case this is called directly.
        return
    from_db = production_config('databases')[dbname]
    to_db = config('databases')[dbname]
    if confirm("Copy data for db {db_name} from {from_host} "
               "to {to_host}?".format(from_host=from_db['HOST'],
                                      to_host=to_db['HOST'],
                                      db_name=to_db['NAME']),
               default=False):
        filename = '{dbname}_dump.sql'.format(dbname=from_db['NAME'])
        with cd('/tmp'):
            print(green("Password for {from_db_host} is {from_db_pw}.".format(
                from_db_host=from_db['HOST'],
                from_db_pw=from_db['PASSWORD'])))
            local('pg_dump -c -h {dbhost} -U {dbuser} {dbname} -f {fn}'.format(
                dbhost=from_db['HOST'],
                dbuser=from_db['USER'],
                dbname=from_db['NAME'],
                fn=filename))
            if drop_and_create_db:
                drop_database(dbname)
                create_individual_database(dbname)
            print(green("Password for {to_db_host} is {to_db_pw}.".format(
                to_db_host=to_db['HOST'],
                to_db_pw=to_db['PASSWORD'])))
            local('psql -h {host} -U {user} {db_name} < {fn}'.format(
                host=to_db['HOST'],
                user=to_db['USER'],
                db_name=to_db['NAME'],
                fn=filename))
            local('rm %s' % filename)

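# Hypothetical command-line usage (host and task names are illustrative):
#
#     $ fab -H staging.example.org copy_databases
#
# This dumps each copyable database from the production host with pg_dump,
# optionally drops and recreates the target database, and restores the dump
# with psql.
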
def initial_create_srv_dir():
    """Create a checkout in /srv with correct ownership."""
    if exists(config('basedir')):
        if not confirm("Directory %s already exists. Really set it up anew?"
                       % config('basedir'), default=False):
            return
        else:
            if confirm("Delete old contents of %s on %s?" % (
                    config('basedir'), env['host']), default=False):
                if confirm("Are you sure?", default=False):
                    sudo('rm -r %s' % config('basedir'))
    # Create a directory in /srv named after the hostname.
    sudo('mkdir -p %s' % config('basedir'))
    # Make buildout the owner.
    sudo('chown buildout:buildout %s' % config('basedir'))
    # And make a checkout of the latest tag *as user buildout*.
    sudo(initial_checkout_cmd(), user='******')

def switch_and_buildout():
    """Checks out latest tag and runs buildout."""
    if not exists(config('basedir')):
        abort("Directory %s doesn't exist yet. Run 'create_srv_dir'." %
              config('basedir'))
    with cd(config('basedir')):
        # Confirm the switch.
        if config('checkout') == 'trunk':
            if not confirm("Switch to trunk on %s?" % env['host'],
                           default=True):
                abort("Keeping previous checkout.")
        elif config('checkout') == 'tag':
            if not confirm("Switch to tag %s on %s?" % (
                    config('tag'), env['host']), default=True):
                abort("Keeping previous checkout.")
        else:
            abort("config('checkout') should be either 'trunk' or 'tag'.")
        if config('svn'):
            if config('checkout') == 'tag':
                tag_url = config('vcs').tag_url(config('tag'))
            else:
                tag_url = config('vcs').tag_url('').replace('tags/', 'trunk/')
            print("Switching to %s" % tag_url)
            sudo("svn switch %s" % tag_url, user='******')
        if config('git'):
            sudo("git fetch", user='******')
            if config('checkout') == 'tag':
                sudo("git checkout %s" % config('tag'), user='******')
            else:
                sudo("git checkout master", user='******')
                sudo("git merge origin/master", user='******')
        # Replace buildout.cfg only if it is a symlink (or doesn't exist).
        check_is_symlink = ("python -c \"import os;"
                            "print(os.path.islink('buildout.cfg'))\"")
        if exists(os.path.join(config('basedir'), 'buildout.cfg')):
            if 'True' in sudo(check_is_symlink, user='******'):
                sudo('rm buildout.cfg')
            else:
                abort("buildout.cfg isn't a symlink.")
        sudo("ln -s %s buildout.cfg" % config('buildout-file'),
             user='******')
        if not exists(os.path.join(config('basedir'), 'bin', 'buildout')):
            print("bin/buildout doesn't exist yet. Running bootstrap.")
            sudo("python bootstrap.py", user='******')
        sudo("bin/buildout", user='******')

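# A typical full deploy with these tasks might look like this (hypothetical
# host name; task order follows the dependencies above):
#
#     $ fab -H staging.example.org switch_and_buildout
#     $ fab -H staging.example.org sync_and_migrate
#     $ fab -H staging.example.org start_or_restart
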
def check_srv_availability():
    """Check whether the site's basedir exists on the server."""
    if not exists(config('basedir')):
        print("Directory %s doesn't exist yet" % config('basedir'))
        print("Run fabric with the 'create_srv_dir' command.")