def sync_to_remote_db(roles="webapp_servers"):
    """
    Pulls a DB dump from a remote host, wipes the local DB, and loads the
    remote copy in its place.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    if not _current_host_has_role(roles):
        return
    dump_filename = db.util.get_db_dump_filename()
    remote_dump_path = os.path.join(env.REMOTE_CODEBASE_PATH, dump_filename)
    with cd(env.REMOTE_CODEBASE_PATH):
        # Produce the dump inside the remote virtualenv, download it,
        # then remove the remote copy.
        run("workon %s && ./manage.py ft_dump_db %s" % (
            env.REMOTE_VIRTUALENV_NAME, dump_filename))
        get(remote_dump_path, dump_filename)
        run("rm %s" % dump_filename)
    # In a multi-host environment, target hostname is appended by Fabric.
    # TODO: Make this use Fabric 1.0's improved get() when it's released.
    host_suffixed_name = "%s.%s" % (dump_filename, env["host"])
    if os.path.exists(host_suffixed_name):
        # Move it back to what it should be.
        local("mv %s %s" % (host_suffixed_name, dump_filename))
    local("./manage.py ft_restore_db %s" % dump_filename, capture=False)
    local("rm %s" % dump_filename)
    # Die after this to prevent executing this with more hosts.
    sys.exit(0)
def sync_to_remote_db(roles='webapp_servers'):
    """
    Retrieves a remote DB dump, wipes your local DB, and installs the
    remote copy in place.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    if _current_host_has_role(roles):
        dump_filename = db.util.get_db_dump_filename()
        dump_path = os.path.join(env.REMOTE_CODEBASE_PATH, dump_filename)
        with cd(env.REMOTE_CODEBASE_PATH):
            # Dump the remote DB inside its virtualenv, download the
            # dump, then delete the remote copy.
            run("workon %s && ./manage.py ft_dump_db %s" % (env.REMOTE_VIRTUALENV_NAME, dump_filename))
            get(dump_path, dump_filename)
            run("rm %s" % dump_filename)
        # In a multi-host environment, target hostname is appended by Fabric.
        # TODO: Make this use Fabric 1.0's improved get() when it's released.
        filename_with_hostname = '%s.%s' % (dump_filename, env['host'])
        if os.path.exists(filename_with_hostname):
            # Move it back to what it should be.
            local('mv %s %s' % (filename_with_hostname, dump_filename))
        # Restore over the local DB, then discard the downloaded dump.
        local('./manage.py ft_restore_db %s' % dump_filename, capture=False)
        local('rm %s' % dump_filename)
        # Die after this to prevent executing this with more hosts.
        sys.exit(0)
def pip_update_reqs(roles=('webapp_servers', 'celery_servers')):
    """
    Updates your virtualenv from requirements.txt.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    # Tuple default rather than a list: mutable default arguments are
    # shared across calls and are a well-known Python pitfall.
    if _current_host_has_role(roles):
        print("=== UPDATING REQUIREMENTS ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_pip_update_reqs" % env.REMOTE_VIRTUALENV_NAME)
def pip_update_reqs(roles=("webapp_servers", "celery_servers")):
    """
    Updates your virtualenv from requirements.txt.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    # Tuple default rather than a list: mutable default arguments are
    # shared across calls and are a well-known Python pitfall.
    if _current_host_has_role(roles):
        print("=== UPDATING REQUIREMENTS ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_pip_update_reqs" % env.REMOTE_VIRTUALENV_NAME)
def supervisord_restart_all(roles="webapp_servers"):
    """
    Restarts all of supervisord's managed programs on matching hosts.
    """
    if not _current_host_has_role(roles):
        return
    print("=== RESTARTING SUPERVISORD PROGRAMS ===")
    with cd(env.REMOTE_CODEBASE_PATH):
        command = "workon %s && ./manage.py ft_supervisord_restart_prog" % env.REMOTE_VIRTUALENV_NAME
        run(command)
def fabtastic_update(roles=("webapp_servers", "celery_servers")):
    """
    Updates your copy of django-fabtastic from the git repository.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    # Tuple default rather than a list: mutable default arguments are
    # shared across calls and are a well-known Python pitfall.
    if _current_host_has_role(roles):
        print("=== UPDATING FABTASTIC ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_fabtastic_update" % env.REMOTE_VIRTUALENV_NAME)
def compress(roles="webapp_servers"):
    """
    Runs django-compressor's offline compression command.
    """
    if not _current_host_has_role(roles):
        return
    print("=== COMPRESSING STATIC MEDIA ===")
    with cd(env.REMOTE_CODEBASE_PATH):
        command = "workon %s && ./manage.py compress --force" % env.REMOTE_VIRTUALENV_NAME
        run(command)
def fabtastic_update(roles=('webapp_servers', 'celery_servers')):
    """
    Updates your copy of django-fabtastic from the git repository.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    # Tuple default rather than a list: mutable default arguments are
    # shared across calls and are a well-known Python pitfall.
    if _current_host_has_role(roles):
        print("=== UPDATING FABTASTIC ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_fabtastic_update" % env.REMOTE_VIRTUALENV_NAME)
def flush_cache(roles=("webapp_servers", "celery_servers")):
    """
    Flushes the cache.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    # Tuple default rather than a list: mutable default arguments are
    # shared across calls and are a well-known Python pitfall.
    if _current_host_has_role(roles):
        print("=== FLUSHING CACHE ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_clear_cache" % env.REMOTE_VIRTUALENV_NAME)
def flush_cache(roles=('webapp_servers', 'celery_servers')):
    """
    Flushes the cache.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    # Tuple default rather than a list: mutable default arguments are
    # shared across calls and are a well-known Python pitfall.
    if _current_host_has_role(roles):
        print("=== FLUSHING CACHE ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_clear_cache" % env.REMOTE_VIRTUALENV_NAME)
def compress(roles='webapp_servers'):
    """
    Runs django-compressor's offline compression command.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    if _current_host_has_role(roles):
        print("=== COMPRESSING STATIC MEDIA ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            # --force regenerates compressed files even if they exist.
            run("workon %s && ./manage.py compress --force" % env.REMOTE_VIRTUALENV_NAME)
def mediasync_syncmedia(roles="webapp_servers"):
    """
    Syncs the checked out git media with S3.  Guarded by
    env.already_media_synced so a multi-host run only syncs once.
    """
    if not _current_host_has_role(roles):
        return
    if env.already_media_synced:
        return
    print("=== SYNCING STATIC MEDIA WITH S3 ===")
    with cd(env.REMOTE_CODEBASE_PATH):
        run("workon %s && ./manage.py syncmedia" % env.REMOTE_VIRTUALENV_NAME)
    env.already_media_synced = True
def supervisord_restart_all(roles='webapp_servers'):
    """
    Restarts all of supervisord's managed programs.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    if _current_host_has_role(roles):
        print("=== RESTARTING SUPERVISORD PROGRAMS ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_supervisord_restart_prog" % env.REMOTE_VIRTUALENV_NAME)
def collectstatic(roles='webapp_servers'):
    """
    Runs Django's collectstatic management command to gather static
    media.  Guarded by env.already_media_synced so a multi-host run only
    collects once.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    if _current_host_has_role(roles) and not env.already_media_synced:
        print("=== SYNCING STATIC MEDIA WITH S3 ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py collectstatic --noinput" % env.REMOTE_VIRTUALENV_NAME)
        env.already_media_synced = True
def south_migrate():
    """
    Migrates the DB schema with South.  Sets already_db_migrated to
    prevent double migrations.
    """
    if not _current_host_has_role('webapp_servers'):
        return
    if env.already_db_migrated:
        return
    print("=== RUNNING SOUTH DB MIGRATIONS ===")
    with cd(env.REMOTE_CODEBASE_PATH):
        run("workon %s && ./manage.py migrate" % env.REMOTE_VIRTUALENV_NAME)
    env.already_db_migrated = True
def git_pull(roles=('webapp_servers', 'celery_servers')):
    """
    Pulls the latest master branch from the git repo.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    # Tuple default rather than a list: mutable default arguments are
    # shared across calls and are a well-known Python pitfall.
    if _current_host_has_role(roles):
        print("=== PULLING FROM GIT ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("git pull")
            # Remove .pyc files for modules that no longer exist.
            run("find . -name '*.pyc' -delete")
def south_migrate():
    """
    Migrates the DB schema with South.  Sets already_db_migrated to
    prevent double migrations when running against multiple hosts.
    """
    if _current_host_has_role("webapp_servers") and not env.already_db_migrated:
        print("=== RUNNING SOUTH DB MIGRATIONS ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py migrate" % env.REMOTE_VIRTUALENV_NAME)
        env.already_db_migrated = True
def gunicorn_restart_workers():
    """
    Reloads gunicorn.  This must be done to re-compile the code after a
    new revision has been checked out.
    """
    if not _current_host_has_role("webapp_servers"):
        return
    print("=== RESTARTING GUNICORN WORKERS ===")
    with cd(env.REMOTE_CODEBASE_PATH):
        command = "workon %s && ./manage.py ft_gunicorn_restart" % env.REMOTE_VIRTUALENV_NAME
        run(command)
    print("Gunicorn reloaded")
def git_pull(roles=("webapp_servers", "celery_servers")):
    """
    Pulls the latest master branch from the git repo.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    # Tuple default rather than a list: mutable default arguments are
    # shared across calls and are a well-known Python pitfall.
    if _current_host_has_role(roles):
        print("=== PULLING FROM GIT ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("git pull")
            # Remove .pyc files for modules that no longer exist.
            run("find . -name '*.pyc' -delete")
def supervisord_restart_prog(program, roles="webapp_servers"):
    """
    Restarts a single supervisord-managed program.

    :arg str program: The name of the program to restart (as per
        supervisor's conf.d/ contents).
    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    if _current_host_has_role(roles):
        print("=== RESTARTING SUPERVISORD PROGRAMS ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_supervisord_restart_prog %s" % (env.REMOTE_VIRTUALENV_NAME, program))
def gunicorn_restart_workers():
    """
    Reloads gunicorn.  This must be done to re-compile the code after a
    new revision has been checked out.
    """
    if _current_host_has_role('webapp_servers'):
        print("=== RESTARTING GUNICORN WORKERS ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_gunicorn_restart" % env.REMOTE_VIRTUALENV_NAME)
        print("Gunicorn reloaded")
def supervisord_restart_prog(program, roles='webapp_servers'):
    """
    Restarts a single supervisord-managed program.

    :arg str program: The name of the program to restart (as per
        supervisor's conf.d/ contents).
    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    if _current_host_has_role(roles):
        print("=== RESTARTING SUPERVISORD PROGRAMS ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_supervisord_restart_prog %s" % (env.REMOTE_VIRTUALENV_NAME, program))
def celeryd_restart(roles="celery_servers"):
    """
    Reloads celeryd.  This must be done to re-compile the code after a
    new revision has been checked out.

    NOTE: This broadcasts a 'shutdown' call to all celery workers.  You
    must have supervisor or something running to start them back up, or
    this ends up just being a shutdown (sans restart).

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    if _current_host_has_role(roles):
        print("=== RESTARTING CELERY DAEMON ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_celeryd_restart" % env.REMOTE_VIRTUALENV_NAME)
        # Was a Python 2 `print` statement (a SyntaxError under Python 3);
        # converted to a call for consistency with the rest of the file.
        print("Celery shutdown broadcasted, workers restarting.")
def backup_db_to_s3():
    """
    Backs up the DB to Amazon S3.  The DB server runs pg_dump, then
    uploads to S3 via the s3cmd command.

    On new DB instances, you'll need to run 's3cmd --configure' (as the
    user that will be running s3cmd) to setup the keys.  You'll notice
    they aren't passed here as a result of that.
    """
    if not _current_host_has_role('webapp_servers'):
        return
    print("=== BACKING UP DB TO S3 ===")
    with cd(env.REMOTE_CODEBASE_PATH):
        command = "workon %s && ./manage.py ft_backup_db_to_s3" % env.REMOTE_VIRTUALENV_NAME
        run(command)
    print("DB backed up to S3.")
    # Die after this to prevent executing this with more hosts.
    sys.exit(0)
def backup_db_to_s3():
    """
    Backs up the DB to Amazon S3.  The DB server runs pg_dump, then
    uploads to S3 via the s3cmd command.

    On new DB instances, you'll need to run 's3cmd --configure' (as the
    user that will be running s3cmd) to setup the keys.  You'll notice
    they aren't passed here as a result of that.
    """
    if _current_host_has_role("webapp_servers"):
        print("=== BACKING UP DB TO S3 ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_backup_db_to_s3" % env.REMOTE_VIRTUALENV_NAME)
        print("DB backed up to S3.")
        # Die after this to prevent executing this with more hosts.
        sys.exit(0)
def celeryd_restart(roles='celery_servers'):
    """
    Reloads celeryd.  This must be done to re-compile the code after a
    new revision has been checked out.

    NOTE: This broadcasts a 'shutdown' call to all celery workers.  You
    must have supervisor or something running to start them back up, or
    this ends up just being a shutdown (sans restart).

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    if _current_host_has_role(roles):
        print("=== RESTARTING CELERY DAEMON ===")
        with cd(env.REMOTE_CODEBASE_PATH):
            run("workon %s && ./manage.py ft_celeryd_restart" % env.REMOTE_VIRTUALENV_NAME)
        # Was a Python 2 `print` statement (a SyntaxError under Python 3);
        # converted to a call for consistency with the rest of the file.
        print("Celery shutdown broadcasted, workers restarting.")
def get_remote_db(roles="webapp_servers"):
    """
    Retrieves a remote DB dump and drops it in your project's root
    directory.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    if not _current_host_has_role(roles):
        return
    dump_filename = db.util.get_db_dump_filename()
    remote_dump_path = os.path.join(env.REMOTE_CODEBASE_PATH, dump_filename)
    with cd(env.REMOTE_CODEBASE_PATH):
        # Produce the dump inside the remote virtualenv, download it,
        # then remove the remote copy.
        run("workon %s && ./manage.py ft_dump_db %s" % (
            env.REMOTE_VIRTUALENV_NAME, dump_filename))
        get(remote_dump_path, dump_filename)
        run("rm %s" % dump_filename)
    # In a multi-host environment, target hostname is appended by Fabric.
    # TODO: Make this use Fabric 1.0's improved get() when it's released.
    host_suffixed_name = "%s.%s" % (dump_filename, env["host"])
    # Move it back to what it should be.
    local("mv %s %s" % (host_suffixed_name, dump_filename))
    # Die after this to prevent executing this with more hosts.
    sys.exit(0)
def get_remote_db(roles='webapp_servers'):
    """
    Retrieves a remote DB dump and dumps it in your project's root
    directory.

    :keyword roles: Role name(s) whose hosts this task should run against.
    """
    if _current_host_has_role(roles):
        dump_filename = db.util.get_db_dump_filename()
        dump_path = os.path.join(env.REMOTE_CODEBASE_PATH, dump_filename)
        with cd(env.REMOTE_CODEBASE_PATH):
            # Dump the remote DB inside its virtualenv, download the
            # dump, then delete the remote copy.
            run("workon %s && ./manage.py ft_dump_db %s" % (env.REMOTE_VIRTUALENV_NAME, dump_filename))
            get(dump_path, dump_filename)
            run("rm %s" % dump_filename)
        # In a multi-host environment, target hostname is appended by Fabric.
        # TODO: Make this use Fabric 1.0's improved get() when it's released.
        new_filename = '%s.%s' % (dump_filename, env['host'])
        # Move it back to what it should be.
        local('mv %s %s' % (new_filename, dump_filename))
        # Die after this to prevent executing this with more hosts.
        sys.exit(0)