def run(self, *args, **kwargs):
    """Restore a MySQL database from the configured dump file.

    The target database may be passed as the first positional argument;
    otherwise it falls back to config['mysql']['server']['database'].
    Mutates the global fabric ``env`` to point at the configured shell host
    before running the restore.
    """
    self.validate_config()
    try:
        database = args[0]
    except IndexError:
        database = config['mysql']['server']['database']
    # SSH (shell) connection settings.
    user = config['mysql']['shell']['user']
    host = config['mysql']['shell']['host']
    port = config['mysql']['shell']['port']
    filename = config['mysql']['shell']['dumpfile']
    # MySQL server credentials.
    dbhost = config['mysql']['server']['host']
    dbuser = config['mysql']['server']['user']
    dbpassword = config['mysql']['server']['password']
    env.host = host
    env.user = user
    env.port = port
    env.host_string = "%s@%s:%s" % (env.user, env.host, env.port)
    pretty_print('[+] Starting MySQL restore.', 'info')
    # env.use_ssh_config = True
    with hide('running'):
        pretty_print('Restoring to %s from %s' % (database, filename), 'info')
        # NOTE(review): the password is exposed on the remote command line
        # (visible via ps); consider a defaults-extra-file instead.
        run('mysql -u%s -p%s -h%s %s < %s' % (dbuser, dbpassword, dbhost, database, filename))
    pretty_print('[+] MySQL restore finished.', 'info')
def config():
    """Link the release's config files into place and (re)start services.

    Copies *.conf / *.ini from the current release's etc/ directory into
    ETC_PATH, then symlinks the sysctl, nginx, memcached and uwsgi configs
    and bounces each service.
    """
    # Publish config files from the current release into the shared etc path.
    # FIX: the original ran this identical cd/cp pair twice back to back;
    # once is sufficient.
    with cd('%s/etc/' % CURRENT_PATH):
        run('cp *.conf *.ini %s' % ETC_PATH)
    # sysctl
    with cd('/etc/sysctl.d'):
        sudo('rm -f boc.conf')
        sudo('ln -s %s/sysctl.conf boc.conf' % ETC_PATH)
        sudo('sysctl -f boc.conf')
    # nginx
    with cd('/etc/nginx/sites-available'):
        sudo('rm -f %s.conf' % NAME)
        sudo('ln -s %s/nginx.conf %s.conf' % (ETC_PATH, NAME))
    with cd('/etc/nginx/sites-enabled'):
        sudo('rm -f %s.conf' % NAME)
        sudo('ln -s /etc/nginx/sites-available/%s.conf' % NAME)
        sudo('rm -f /etc/nginx/sites-enabled/default')
    sudo('/usr/sbin/nginx -s quit && sleep 5')
    sudo('/etc/init.d/uwsgi stop')
    sudo('/etc/init.d/nginx start')
    # memcached
    sudo('/etc/init.d/memcached stop')
    with cd('/etc'):
        sudo('rm -f memcached.conf')
        sudo('ln -s %s/memcached.conf' % ETC_PATH)
    sudo('/etc/init.d/memcached start')
    # uwsgi
    with cd('/etc/uwsgi/apps-available'):
        sudo('rm -f %s.ini' % NAME)
        sudo('ln -s %s/config.ini %s.ini' % (ETC_PATH, NAME))
    with cd('/etc/uwsgi/apps-enabled'):
        sudo('rm -f %s.ini' % NAME)
        sudo('ln -s /etc/uwsgi/apps-available/%s.ini' % NAME)
    sudo('/etc/init.d/uwsgi start')
def release(release_dir):
    """Activate the given build by repointing the `current` symlink at it."""
    with cd(env.PROJECT.releases):
        for shell_cmd in ('rm -rf current', 'ln -s %s current' % release_dir):
            run(shell_cmd)
def run(self, *args, **kwargs):
    """Clone a MySQL database into a timestamped copy.

    Dumps the source database, creates ``<db>_<YYYYmmdd_HHMMSS>``, and
    restores the dump into the new database.  The source database may be
    passed as the first positional argument.
    """
    self.validate_config()
    try:
        database = args[0]
    except IndexError:
        database = config['mysql']['server']['database']
    # SSH (shell) connection settings.
    user = config['mysql']['shell']['user']
    host = config['mysql']['shell']['host']
    port = config['mysql']['shell']['port']
    # MySQL server credentials.
    dbhost = config['mysql']['server']['host']
    dbuser = config['mysql']['server']['user']
    dbpassword = config['mysql']['server']['password']
    env.host = host
    env.user = user
    env.port = port
    env.host_string = "%s@%s:%s" % (env.user, env.host, env.port)
    pretty_print('[+] Starting MySQL clone.', 'info')
    # Timestamped name for the cloned database.
    new_database = '%s_%s' % (database, datetime.now().strftime("%Y%m%d_%H%M%S"))
    with hide('running'):
        MySQLDBDump(config).run(database)
        pretty_print('Creating new database: %s' % new_database)
        # NOTE(review): the <<< here-string requires a bash-compatible remote
        # shell, and the password appears on the command line — confirm.
        run('mysql -u%s -p%s -h%s %s <<< %s' % (dbuser, dbpassword, dbhost, database, '\"CREATE DATABASE %s\"' % new_database))
        MySQLDBRestore(config).run(new_database)
    pretty_print('[+] MySQL clone finished.', 'info')
def setMaster():
    # Rebuild the master's /etc/hosts from a pristine backup plus the local
    # `hosts` file, rsync the project over, distribute the SSH key, and
    # finally run the `genkey` task.  (Python 2 — uses the print statement.)
    if exists('/etc/hosts0'):
        print 'etc/hosts0 exists'
    else:
        # First run: keep a pristine copy of the original hosts file.
        sudo('cp /etc/hosts /etc/hosts0')
    # Always rebuild /etc/hosts from the pristine copy, then append the
    # uploaded host entries.
    sudo('rm /etc/hosts')
    sudo('cp /etc/hosts0 /etc/hosts')
    put('hosts')
    sudo('cat hosts|sudo tee -a /etc/hosts')
    run('rm hosts')
    run('cat /etc/hosts')
    # Sync the project into the user's home (excluding results).
    path1 = '/home/{0}'.format(parm['USER'])
    rsync_project(path1, exclude=['result'])
    path2 = join(path1, basename(realpath('.')))
    path3 = join(path2, parm['programdir'])
    # Ensure the private key exists (mode 400) in both locations.
    for dst in (path2, path3):
        fi = '{0}/{1}'.format(dst, parm['keyfile'])
        if not exists(fi, use_sudo=True):
            put(parm['keyfile'], dst)
            sudo('chmod 400 {0}'.format(fi))
    execute('genkey')
def gene_sqlite():
    """Generate the sqlite file and send it by email (remote manage.py task)."""
    env.host_string = config.HOST_STRING
    with cd('/var/www/xichuangzhu'):
        with shell_env(MODE='PRODUCTION'):
            with prefix('source venv/bin/activate'):
                run('python manage.py gene_sqlite')
def dj(command, chdir=None):
    """
    Run a Django manage.py command on the server.

    ``chdir`` overrides the working directory (defaults to env.project_dir).
    """
    workdir = chdir or env.project_dir
    manage_cmd = ("{virtualenv_dir}/bin/python {project_dir}/manage.py {dj_command} "
                  "--settings={project_conf}".format(dj_command=command, **env))
    with cd(workdir):
        run(manage_cmd)
def site_install(path, db_user, db_pass, db_host, db_name):
    """Install a fresh Drupal site

    Use Drush to setup the Drupal structure in database

    Args:
        path: Directory of the website
        db_user: Database user to use when creating and running the Drupal site
        db_pass: That user's password
        db_host: Database host
        db_name: Database name
    """
    # NOTE(review): the password ends up in the db URL on the command line;
    # the warning below is shown to the operator before proceeding.
    db_url = 'mysql://%s:%s@%s/%s' % (db_user, db_pass, db_host, db_name)
    warning = """
    WARNING: This is an inherently insecure method for interacting with the
    database since the database password will be written to the command line
    and will be visible to anyone who can access the .mysql_history.
    Additionally, while this command is being run the password is exposed
    to anyone who can run the ps command on the server. Unfortunately this
    is the only method that Drush currently supports.

    Do you still wish to proceed?
    """
    confirm_overwrite(warning)
    with cd(path):
        run("drush site-install standard --db-url=" + db_url)
def _update_virtualenv(source_folder):
    """Create the sibling Python 3 virtualenv if needed, then sync requirements."""
    venv = source_folder + '/../virtualenv'
    if not exists(venv + '/bin/pip'):
        # Bootstrap the environment next to the source checkout.
        run('virtualenv --python=python3 %s' % (venv,))
    run('%s/bin/pip install -r %s/requirements.txt' % (venv, source_folder))
def setup_haproxy(debug=False): sudo('ufw allow 81') # nginx moved sudo('ufw allow 1936') # haproxy stats sudo('apt-get install -y haproxy') sudo('apt-get remove -y haproxy') with cd(env.VENDOR_PATH): run('wget http://haproxy.1wt.eu/download/1.5/src/devel/haproxy-1.5-dev17.tar.gz') run('tar -xf haproxy-1.5-dev17.tar.gz') with cd('haproxy-1.5-dev17'): run('make TARGET=linux2628 USE_PCRE=1 USE_OPENSSL=1 USE_ZLIB=1') sudo('make install') put('config/haproxy-init', '/etc/init.d/haproxy', use_sudo=True) sudo('chmod u+x /etc/init.d/haproxy') sudo('mkdir -p /etc/haproxy') if debug: put('config/debug_haproxy.conf', '/etc/haproxy/haproxy.cfg', use_sudo=True) else: put(os.path.join(env.SECRETS_PATH, 'configs/haproxy.conf'), '/etc/haproxy/haproxy.cfg', use_sudo=True) sudo('echo "ENABLED=1" > /etc/default/haproxy') cert_path = "%s/config/certificates" % env.NEWSBLUR_PATH run('cat %s/newsblur.com.crt > %s/newsblur.pem' % (cert_path, cert_path)) run('cat %s/newsblur.com.key >> %s/newsblur.pem' % (cert_path, cert_path)) put('config/haproxy_rsyslog.conf', '/etc/rsyslog.d/49-haproxy.conf', use_sudo=True) sudo('restart rsyslog') sudo('/etc/init.d/haproxy stop') sudo('/etc/init.d/haproxy start')
def upgrade_django():
    """Stop gunicorn, upgrade django/gunicorn, pull the code, reload supervisor."""
    with cd(env.NEWSBLUR_PATH):
        with settings(warn_only=True):
            sudo('supervisorctl stop gunicorn')
            run('./utils/kill_gunicorn.sh')
            sudo('easy_install -U django gunicorn')
            pull()
            sudo('supervisorctl reload')
def backup_postgresql():
    """Run the PostgreSQL backup script on the server."""
    # crontab for postgres master server
    # 0 4 * * * python /srv/newsblur/utils/backups/backup_psql.py
    # 0 * * * * sudo find /var/lib/postgresql/9.2/archive -mtime +1 -exec rm {} \;
    # 0 */4 * * * sudo find /var/lib/postgresql/9.2/archive -type f -mmin +360 -delete
    with cd(os.path.join(env.NEWSBLUR_PATH, 'utils/backups')):
        run('python backup_psql.py')
def setup_node_app():
    """Install node.js from the chris-lea PPA plus npm's `supervisor` runner."""
    for pkg_cmd in ('add-apt-repository -y ppa:chris-lea/node.js',
                    'apt-get update',
                    'apt-get install -y nodejs'):
        sudo(pkg_cmd)
    run('curl -L https://npmjs.org/install.sh | sudo sh')
    sudo('npm install -g supervisor')
    sudo('ufw allow 8888')
def staging_full():
    """Update staging: pull, migrate, HUP gunicorn, then warm the site caches."""
    steps = (
        'git pull',
        './manage.py migrate',
        'kill -HUP `cat logs/gunicorn.pid`',
        'curl -s http://dev.newsblur.com > /dev/null',
        'curl -s http://dev.newsblur.com/m/ > /dev/null',
    )
    with cd('~/staging'):
        for step in steps:
            run(step)
def kill_celery():
    """Force-kill celery workers (via sudo when connected as the ubuntu user)."""
    with cd(env.NEWSBLUR_PATH):
        with settings(warn_only=True):
            runner = sudo if env.user == 'ubuntu' else run
            runner('./utils/kill_celery.sh')
def unpack(archive_path, temp_folder='/tmp/build_temp'):
    "Unpacks the tarball into the correct place"
    print(green("Creating build folder"))
    # Create temp folder (wiping any leftover from a previous run).
    run('if [ -d "%s" ]; then rm -rf "%s"; fi' % (temp_folder, temp_folder))
    run('mkdir -p %s' % temp_folder)
    with cd('%s' % temp_folder):
        run('tar xzf %s' % archive_path)
    # Create new build folder (wipe and recreate env.BuildRoot).
    sudo('if [ -d "%(BuildRoot)s" ]; then rm -rf "%(BuildRoot)s"; fi' % env)
    sudo('mkdir -p %s' % env.BuildRoot)
    # Move src to build
    sudo('mv %s/src/* %s' % (temp_folder, env.BuildRoot))
    # Create Application Configuration File
    create_parameters_ini()
    rename_htaccess()
    # Delete temporal files and directories.
    run('rm -rf %s' % temp_folder)
    run('rm -f %s' % archive_path)
def kill():
    """Reload supervisor, then force-kill any straggling gunicorn workers."""
    sudo('supervisorctl reload')
    with settings(warn_only=True):
        # NOTE(review): unlike kill_celery(), this does not cd into
        # env.NEWSBLUR_PATH first, so the relative script path depends on the
        # login directory — confirm this is intended.
        if env.user == 'ubuntu':
            sudo('./utils/kill_gunicorn.sh')
        else:
            run('./utils/kill_gunicorn.sh')
def install_couchdb():
    """ Installing Couchdb """
    # Build dependencies for compiling CouchDB from source.
    require.deb.packages([
        'erlang', 'libicu-dev', 'libmozjs-dev', 'libcurl4-openssl-dev'
    ])
    # Download, build, install, then clean up the source tree.
    with cd('/tmp'):
        run('wget http://apache.mirrors.multidist.eu/couchdb/' +
            'releases/1.2.0/apache-couchdb-1.2.0.tar.gz')
        run('tar -xzvf apache-couchdb-1.2.0.tar.gz')
        run('cd apache-couchdb-1.2.0; ./configure; make')
        sudo('cd apache-couchdb-1.2.0; make install')
        run('rm -rf apache-couchdb-1.2.0')
        run('rm -rf apache-couchdb-1.2.0.tar.gz')
    # Dedicated service user owning the etc/lib/log/run directories.
    require.users.user("couchdb", home='/usr/local/var/lib/couchdb')
    sudo('chown -R couchdb:couchdb /usr/local/etc/couchdb')
    sudo('chown -R couchdb:couchdb /usr/local/var/lib/couchdb')
    sudo('chown -R couchdb:couchdb /usr/local/var/log/couchdb')
    sudo('chown -R couchdb:couchdb /usr/local/var/run/couchdb')
    sudo('chmod 0770 /usr/local/etc/couchdb')
    sudo('chmod 0770 /usr/local/var/lib/couchdb')
    sudo('chmod 0770 /usr/local/var/log/couchdb')
    sudo('chmod 0770 /usr/local/var/run/couchdb')
    # Supervise couchdb.  NOTE(review): the user value reads '******' in
    # source — appears redacted; confirm the real service account.
    require.supervisor.process('couchdb',
                               user='******',
                               command='couchdb',
                               autostart='true',
                               environment='HOME=/usr/local/var/lib/couchdb')
    print(green("CouchDB 1.2.0 successfully installed"))
def convert_file_to_raw(host, disk_format, filepath):
    # Convert a remote disk image to raw format in place: write to a .tmp
    # sibling first, then overwrite the original with mv -f.
    with settings(host_string=host, connection_attempts=env.connection_attempts):
        with forward_agent(env.key_filename):
            run("qemu-img convert -f %s -O raw %s %s.tmp" % (disk_format, filepath, filepath))
            run("mv -f %s.tmp %s" % (filepath, filepath))
def create_virtualenv(directory, system_site_packages=False, venv_python=None,
                      use_sudo=False, user=None, clear=False, prompt=None,
                      virtualenv_cmd='virtualenv'):
    """
    Create a Python `virtual environment`_.

    ::

        import fabtools

        fabtools.python.create_virtualenv('/path/to/venv')

    .. _virtual environment: http://www.virtualenv.org/
    """
    # Assemble the option flags; --quiet is always passed.
    opts = ['--quiet']
    if system_site_packages:
        opts.append('--system-site-packages')
    if venv_python:
        opts.append('--python=%s' % quote(venv_python))
    if clear:
        opts.append('--clear')
    if prompt:
        opts.append('--prompt=%s' % quote(prompt))
    command = '%s %s %s' % (virtualenv_cmd, ' '.join(opts), quote(directory))
    if use_sudo:
        sudo(command, user=user)
    else:
        run(command)
def sync_virtualenv(self): run('source ~/python-environments/markliu/bin/activate; \ pip install -r ' + self.remote_path + self.name + '/requirements.txt; \ add2virtualenv ' + self.remote_path + 'django-google-webmaster; \ add2virtualenv ' + self.remote_path + 'django-twitter-tags; \ add2virtualenv ' + self.remote_path + 'django-posterous; \ add2virtualenv ' + self.remote_path + 'coltrane-blog')
def _update_virtualenv(source_folder):
    """Ensure the sibling virtualenv exists, then install its requirements."""
    venv = path.join(source_folder, '../virtualenv')
    if not exists(path.join(venv, 'bin', 'pip')):
        run('virtualenv %s' % (venv,))
    run('%s/bin/pip install -r %s/requirements.txt' % (venv, source_folder))
def get_all_gpfs_state():
    """
    Get the GPFS state on all nodes

    @return all_state: dictionary in format: { 'node_name' : 'state' }
    """
    f = StringIO.StringIO()
    all_state = {}
    with settings(hide('running'), output_prefix='', warn_only=True):
        run("mmgetstate -a", stdout=f)
    for line in f.getvalue().splitlines():
        # Skip header/separator lines matched by the module's line filters.
        if any(regex.match(line) for regex in _LINE_REGEX):
            continue
        else:
            # Collapse runs of whitespace; columns are
            # node-number, short-name, state (per mmgetstate output).
            lf = ' '.join(line.split()).split()
            node_short_name = lf[1]
            gpfs_state = lf[2]
            all_state[node_short_name] = gpfs_state
    return all_state
def only_one(name):
    # Rebuild and restart a single docker-compose service on production,
    # after preparing and syncing the release.
    env.user = '******'  # NOTE(review): username appears redacted in source
    prod_prepare()
    prod_sync()
    with cd(REMOTE_DIR):
        run('docker-compose build {0}'.format(name))
        run('docker-compose up -d --no-deps {0}'.format(name))
def get_managers(self):
    """
    Get the current cluster/filesystem managers at any point in time,
    and updates the self.state dictionary
    """
    f = StringIO.StringIO()
    run('mmlsmgr', stdout=f)
    for line in f.getvalue().splitlines():
        # Skip header/separator lines matched by the module's line filters.
        if any(regex.match(line) for regex in _LINE_REGEX):
            continue
        elif re.match('Cluster manager node: ', line):
            # Last token is the manager's name wrapped in parentheses.
            clusterman = line.split()[-1].strip('(').strip(')')
            self.state['managers']['cluster'] = clusterman
        # this should get the filesystem manager lines
        else:
            fs = line.split()[0]
            fsman = line.split()[-1].strip('(').strip(')')
            self.state['managers'][fs] = fsman
    return
def setup_env():
    """Create the per-site virtualenv under ~/.virtualenvs if missing."""
    site = env.opts['site']
    venv_path = '/home/{0}/.virtualenvs/{1}'.format(env.user, site)
    if not exists(venv_path):
        # warn_only: mkdir may fail harmlessly if .virtualenvs already exists.
        with settings(warn_only=True):
            run('mkdir .virtualenvs')
            run('virtualenv .virtualenvs/{0} --no-site-packages'.format(site))
def supervisor_restart():
    """Restart all supervisor-managed processes on the web instances."""
    env_setup = "source /usr/local/bin/virtualenvwrapper.sh && workon venv"
    with cd(env.deploy_dir), prefix(env_setup):
        run("supervisorctl -c codalab/config/generated/supervisor.conf restart all")
def mount_fs_on_all_active(filesystem, node_states):
    """
    Mount a GPFS filesystem on all active nodes

    @param node_states: dict of nodes with 'active' gpfs state
    @type node_states: dict
    @return NOTHING
    """
    # Comma-separated list of every node currently in 'active' state.
    active_nodes_string = ','.join(
        node for node, state in node_states.iteritems() if state == 'active')
    # run the command
    with hide('everything'):
        run("mmmount {0} -N {1}".format(filesystem, active_nodes_string))
    return
def _bootstrap_master(bootstrap_dir_path):
    # Bootstrap the node, then launch locust in --master mode detached
    # (nohup, pty=False) with output redirected to ~/locust-log.txt.
    abs_bootstrap_dir_path = get_abs_path(bootstrap_dir_path)
    _bootstrap(abs_bootstrap_dir_path)
    dir_name = os.path.basename(abs_bootstrap_dir_path)
    run("nohup locust -f /tmp/locust/{0}/locustfile.py \
        --master >~/locust-log.txt 2>&1 < /dev/null &".format(dir_name), pty=False)
def install_requirements(filename, upgrade=False, use_mirrors=False,
                         use_sudo=False, user=None, download_cache=None,
                         quiet=False, pip_cmd='pip'):
    """
    Install Python packages from a pip `requirements file`_.

    ::

        import fabtools

        fabtools.python.install_requirements('project/requirements.txt')

    .. _requirements file: http://www.pip-installer.org/en/latest/requirements.html
    """
    # Assemble the pip option flags from the keyword arguments.
    opts = []
    if use_mirrors:
        opts.append('--use-mirrors')
    if upgrade:
        opts.append('--upgrade')
    if download_cache:
        opts.append('--download-cache="%s"' % download_cache)
    if quiet:
        opts.append('--quiet')
    command = '%s install %s -r %s' % (pip_cmd, ' '.join(opts), filename)
    if use_sudo:
        sudo(command, user=user, pty=False)
    else:
        run(command, pty=False)
def run_install_scripts():
    # Pull the latest helper scripts, install them into $HOME/bin (and
    # .pgpass into $HOME), then substitute project-specific placeholders.
    with cd('$HOME/src/{0}/scripts'.format(PROJECT_NAME)):
        run('git pull origin master')
        run('cp deploy-website.sh $HOME/bin/{0}'.format(FILE_DEPLOY_WEBSITE))
        run('cp mysql-backup.sh $HOME/bin/{0}'.format(FILE_MYSQL_BACKUP))
        run('cp pg-backup.sh $HOME/bin/{0}'.format(FILE_PG_BACKUP))
        run('cp locale-backup.sh $HOME/bin/{0}'.format(FILE_LOCALE_BACKUP))
        run('cp restart-apache.sh $HOME/bin/{0}'.format(FILE_RESTART_APACHE))
        run('cp django-cleanup.sh $HOME/bin/{0}'.format(FILE_DJANGO_CLEANUP))
        run('cp script-settings.sh $HOME/bin/{0}'.format(FILE_SCRIPT_SETTINGS))
        run('cp crontab.txt $HOME/bin/{0}'.format(FILE_CRONTAB))
        run('cp {0} $HOME/bin/{0}'.format(FILE_SHOW_MEMORY))
        # This one goes to $HOME
        run('cp .pgpass $HOME/{0}'.format(FILE_PGPASS))
    with cd('$HOME/bin'):
        # Fill in credentials and names in the shared settings script...
        sed(FILE_SCRIPT_SETTINGS, 'INSERT_USERNAME', fab_settings.ENV_USER)
        sed(FILE_SCRIPT_SETTINGS, 'INSERT_DB_USER', fab_settings.DB_USER)
        sed(FILE_SCRIPT_SETTINGS, 'INSERT_DB_NAME', fab_settings.DB_NAME)
        sed(FILE_SCRIPT_SETTINGS, 'INSERT_DB_PASSWORD', fab_settings.DB_PASSWORD)
        sed(FILE_SCRIPT_SETTINGS, 'INSERT_PROJECT_NAME', PROJECT_NAME)
        sed(FILE_SCRIPT_SETTINGS, 'INSERT_DJANGO_APP_NAME', fab_settings.DJANGO_APP_NAME)
        sed(FILE_SCRIPT_SETTINGS, 'INSERT_VENV_NAME', fab_settings.VENV_NAME)
        # ...and the project name in each helper script.
        sed(FILE_DEPLOY_WEBSITE, 'INSERT_PROJECTNAME', PROJECT_NAME)
        sed(FILE_MYSQL_BACKUP, 'INSERT_PROJECTNAME', PROJECT_NAME)
        sed(FILE_PG_BACKUP, 'INSERT_PROJECTNAME', PROJECT_NAME)
        sed(FILE_LOCALE_BACKUP, 'INSERT_PROJECTNAME', PROJECT_NAME)
        sed(FILE_RESTART_APACHE, 'INSERT_PROJECTNAME', PROJECT_NAME)
        sed(FILE_DJANGO_CLEANUP, 'INSERT_PROJECTNAME', PROJECT_NAME)
        sed(FILE_CRONTAB, 'INSERT_PROJECTNAME', PROJECT_NAME)
        sed(FILE_SHOW_MEMORY, 'INSERT_PROJECTNAME', PROJECT_NAME)
        # Drop the .bak files fabric's sed leaves behind.
        run('rm -f *.bak')
    with cd('$HOME'):
        sed(FILE_PGPASS, 'INSERT_DB_NAME', fab_settings.DB_NAME)
        sed(FILE_PGPASS, 'INSERT_DB_USER', fab_settings.DB_USER)
        sed(FILE_PGPASS, 'INSERT_DB_PASSWORD', fab_settings.DB_PASSWORD)
def deploy():
    """Deploy the app to the target environment by triggering `svn up` on the server."""
    print(green('deploying...'))
    push()
    # FIX: a stray bare string literal ('Deploy the app to the target
    # environment') sat here as a no-op statement; its text is folded into
    # the docstring above.
    run('svn up /data/www/oiad.dev.upandrunningsoftware.com/htdocs/')
def do_deploy(archive_path):
    """Distribute a packed archive to the web servers.

    Uploads the .tgz to /tmp, unpacks it into a release folder under
    /data/web_static/releases/, and repoints the `current` symlink.

    Returns:
        True on success, False if the archive is missing locally or any
        remote command fails.
    """
    if not path.exists(archive_path):
        return False
    print(env.port)
    alt_path = archive_path[9:]                                # strip "versions/" prefix
    arch_folder = "/data/web_static/releases/" + alt_path[:-4]  # drop ".tgz"
    new_file = "/tmp/" + alt_path
    try:
        put(archive_path, "/tmp/")
        run("sudo mkdir -p {}".format(arch_folder))
        run("sudo tar -xzf {} -C {}".format(new_file, arch_folder))
        run("sudo rm -rf {}".format(new_file))
        # BUG FIX: source and destination were fused ("...web_static/*{}"),
        # so mv received a single argument and always failed; a space is
        # required between the glob and the destination.
        run("sudo mv {}/web_static/* {}".format(arch_folder, arch_folder))
        run("sudo rm -rf {}/web_static".format(arch_folder))
        run("sudo rm -rf /data/web_static/current")
        run("sudo ln -sf {} /data/web_static/current".format(arch_folder))
        return True
    except Exception:
        # Narrowed from a bare `except:`; still best-effort — any remote
        # failure reports False rather than raising.
        return False
def extract_webapp(build_number):
    """Unzip the message-gateway WAR for the given build into ./deploy."""
    build_dir = dist_base_dir + '/dist/' + build_number
    with cd(build_dir):
        run('unzip message-gateway.war -d ./deploy')
def extract_config(build_number):
    """Extract the message-gateway config tarball for the given build."""
    build_dir = dist_base_dir + '/dist/' + build_number
    with cd(build_dir):
        run('tar xvf message-gateway-config.tar')
def run_delete_django():
    """Remove the bundled Django from the webapp's python2.7 lib directory."""
    lib_dir = '$HOME/webapps/{0}/lib/python2.7/'.format(
        fab_settings.DJANGO_APP_NAME)
    with cd(lib_dir):
        for target in ('django', 'Django*'):
            run('rm -rf ' + target)
def run_loaddata_auth():
    """Load the bootstrap auth fixture inside the project's virtualenv."""
    project_dir = '$HOME/webapps/{0}/project/'.format(
        fab_settings.DJANGO_APP_NAME)
    load_cmd = 'workon {0} && ./manage.py loaddata bootstrap_auth.json'.format(
        fab_settings.VENV_NAME)
    with cd(project_dir):
        run(load_cmd)
def run_install_mercurial():
    """Install mercurial for Python 2.7 under the home directory."""
    install_cmd = 'easy_install-2.7 mercurial'
    with cd('$HOME'):
        run(install_cmd)
def run_delete_index_files():
    """Drop the placeholder index.html from the media and static webapps."""
    for app_name in (fab_settings.MEDIA_APP_NAME, fab_settings.STATIC_APP_NAME):
        run('rm -f $HOME/webapps/{0}/index.html'.format(app_name))
def run_install_requirements():
    """Install/upgrade the project's pip requirements inside the virtualenv."""
    pip_cmd = ('workon {0} && pip install -r $HOME/src/{1}/website/webapps/django/'
               'project/requirements.txt --upgrade').format(
        fab_settings.VENV_NAME, PROJECT_NAME)
    run(pip_cmd)
def restart():
    """Restart the hookshot deploy listener via forever."""
    command_line = "cd %s && forever restart deploy/hookshot.js -p %i -b %s -c \"%s\"" % (
        current, port, branch, command)
    run(command_line)
def run_delete_previous_attempts():
    # Remove all artifacts of earlier deployment attempts: project dir,
    # virtualenv, sources, helper scripts, the .pgpass entry and cron jobs.
    run('rm -rf $HOME/webapps/{0}/project'.format(
        fab_settings.DJANGO_APP_NAME))
    run('rm -rf $HOME/Envs/{0}/'.format(fab_settings.VENV_NAME))
    run('rm -rf $HOME/src/{0}/'.format(PROJECT_NAME))
    run('rm -rf $HOME/bin/*{0}*.*'.format(PROJECT_NAME))
    with cd('$HOME'):
        # Strip this project's line from .pgpass (touch first so sed has a file).
        run('touch .pgpass')
        run("sed '/{0}/d' .pgpass > .pgpass_tmp".format(fab_settings.DB_NAME))
        run('mv .pgpass_tmp .pgpass')
        # Strip this project's entries from the crontab.
        run('crontab -l > crontab_bak')
        # NOTE(review): this uses fab_settings.PROJECT_NAME while the rest of
        # the file uses the module-level PROJECT_NAME — confirm both exist
        # and agree.
        run("sed '/{0}.sh/d' crontab_bak > crontab_tmp".format(
            fab_settings.PROJECT_NAME))
        run('crontab crontab_tmp')
        run('rm crontab_tmp')
def start():
    """Start the hookshot deploy listener via forever, logging to *log*."""
    command_line = ("cd %s && forever start -l %s -a deploy/hookshot.js "
                    "-p %i -b %s -c \"%s\"" % (current, log, port, branch, command))
    run(command_line)
def run_create_virtualenv():
    """Recreate the project virtualenv (python2.7, with system site-packages)."""
    venv = fab_settings.VENV_NAME
    with cd('$HOME'):
        run('rm -rf $HOME/Envs/{0}'.format(venv))
        run('mkvirtualenv -p python2.7 --system-site-packages {0}'.format(venv))
def install_base_python_packages():
    """Install pip (then upgrade it) and virtualenv system-wide."""
    for setup_cmd in ("sudo apt-get -y -q install python-pip",
                      "sudo pip install --upgrade pip",
                      "sudo pip install virtualenv"):
        run(setup_cmd)
def stop():
    """Stop the hookshot deploy listener via forever."""
    command_line = "cd %s && forever stop deploy/hookshot.js -p %i -b %s -c \"%s\"" % (
        current, port, branch, command)
    run(command_line)
def collect_static_files():
    """Run collectstatic with production settings inside the virtualenv."""
    activate = 'source %(virtualenv_dir)s/bin/activate' % env
    with prefix(activate):
        with cd(env.project_code_dir):
            run("python manage.py collectstatic %(prod_settings)s" % env)
def install_project_python_packages():
    """pip-install the project's requirements inside the virtualenv."""
    activate = 'source %(virtualenv_dir)s/bin/activate' % env
    with prefix(activate):
        with cd(env.project_code_dir):
            run("pip install -r requirements.txt")
def remote_git_pull():
    """Stash any local changes on the server, then pull the latest code."""
    with cd(env.project_code_dir):
        for git_cmd in ("git stash", "git pull"):
            run(git_cmd)
def install_required_software(): run("sudo apt-get -y -q install python-dev") # Needed to avoid GCC compilation error for pycrypto run("sudo apt-get -y -q install nginx") run("sudo apt-get -y -q install git") run("sudo apt-get -y -q install binutils libproj-dev gdal-bin") # Geospatial libraries run("sudo apt-get -y -q install postgresql postgresql-contrib python-psycopg2") run("sudo apt-get -y -q install postgis postgresql-9.3-postgis-2.1") run("sudo apt-get -y -q install build-essential libxml2-dev libgdal-dev libproj-dev libjson0-dev xsltproc docbook-xsl docbook-mathml") # PostGIS run("sudo apt-get -y -q install redis-server") # Will restart on reboot
def configure_nginx():
    """Install the project's nginx config and restart nginx."""
    run("sudo rm /etc/nginx/sites-enabled/default")
    run("sudo rm /etc/nginx/sites-available/default")
    run("sudo cp -f %(deploy_dir)s/nginx.conf /etc/nginx/" % env)
    # FIX: the call already runs via sudo(); the redundant leading "sudo "
    # inside the command string is dropped.
    sudo("service nginx restart")
def create_virtualenv():
    # Set up virtualenv and change write permissions.
    # NOTE(review): chmod -R 777 makes the virtualenv world-writable —
    # consider tighter permissions/ownership instead.
    run("sudo virtualenv %(virtualenv_dir)s" % env)
    run("sudo chmod -R 777 %(virtualenv_dir)s" % env)
def install_supervisor():
    """Install supervisor (pre-release) in the virtualenv and its upstart conf."""
    activate = 'source %(virtualenv_dir)s/bin/activate' % env
    with prefix(activate):
        run("pip install supervisor --pre")
        run("sudo cp -f %(deploy_dir)s/supervisorstart.conf /etc/init/" % env)
def clone_project(): run("sudo git clone %(github_url)s" % env) # First install of code run("sudo chmod -R 777 %(project_dir)s" % env) # Set permissions
def run_prod_migrations():
    """Apply database migrations with production settings inside the virtualenv."""
    activate = 'source %(virtualenv_dir)s/bin/activate' % env
    with prefix(activate):
        with cd(env.project_code_dir):
            run("python manage.py migrate %(prod_settings)s" % env)
def reboot_remote_host():
    # Reboot the remote machine; the fabric connection will drop afterwards.
    run("sudo reboot")
def set_up_database():
    # Create the production database and user, enable the PostGIS extensions
    # (via psql heredocs that \c into the new database), and grant privileges.
    # NOTE(review): the password is interpolated into the command line and is
    # visible in shell history / ps output.
    run("sudo psql -U postgres -c \"CREATE DATABASE %(prod_postgres_database)s;\"" % env)
    run("sudo psql -U postgres -c \"CREATE USER %(prod_postgres_user)s WITH PASSWORD '%(prod_postgres_password)s';\"" % env)
    run("sudo psql -U postgres <<EOF\n\c %(prod_postgres_database)s\nCREATE EXTENSION postgis;\nEOF" % env)
    run("sudo psql -U postgres <<EOF\n\c %(prod_postgres_database)s\nCREATE EXTENSION postgis_topology;\nEOF" % env)
    run("sudo psql -U postgres -c \"GRANT ALL PRIVILEGES ON DATABASE %(prod_postgres_database)s TO %(prod_postgres_user)s;\"" % env)
def create_superuser():
    """Create a Django superuser (interactive) with production settings."""
    activate = 'source %(virtualenv_dir)s/bin/activate' % env
    with prefix(activate):
        with cd(env.project_code_dir):
            run("python manage.py createsuperuser %(prod_settings)s" % env)
def restart_postgres():
    # Restart the PostgreSQL service on the remote host.
    run("sudo service postgresql restart")
def upload_secrets():
    # Push the local secrets file to the web dir, then sudo-copy it to its
    # final location.
    put(env.local_secrets_path, env.http_dir)
    run('sudo cp %(http_dir)s/secrets.py %(remote_secrets_path)s' % env)