def install(): ''' Install everything from scratch (idempotent). ''' # Install Linux packages sudo('apt-get install -y git xclip python-virtualenv virtualenvwrapper zip ruby') sudo('apt-get install -y python-dev libmysqlclient-dev libjpeg-dev') sudo('apt-get install -y nginx supervisor') # Install Node.js # https://nodejs.org/en/download/package-manager/#debian-and-ubuntu-based-linux-distributions sudo('curl -sL https://deb.nodesource.com/setup_4.x | bash -') sudo('apt-get install -y nodejs') # Setup repositories def ensure_repo_exists(repo, dest): run('[ -e %s ] || git clone %s %s' % (dest, repo, dest)) ensure_repo_exists('https://github.com/codalab/codalab-worksheets', env.deploy_codalab_worksheets_dir) ensure_repo_exists('https://github.com/codalab/codalab-cli', env.deploy_codalab_cli_dir) # Initial setup with cd(env.deploy_codalab_worksheets_dir): run('git checkout %s' % env.git_codalab_tag) run('./setup.sh') with cd(env.deploy_codalab_cli_dir): run('git checkout %s' % env.git_codalab_cli_tag) run('./setup.sh server') run('venv/bin/pip install MySQL-Python') # Deploy! _deploy()
def deploy():
    # Deploy the haxclub Django app: pull/clone the repo, run management
    # commands inside the 'remote' virtualenv, (re)start supervisor, then
    # push nginx/SSL config and bounce nginx.
    with prefix('source $(which virtualenvwrapper.sh) && workon remote'):
        settings_file = '--settings=haxclub.settings.base'
        # env_vars presumably comes from a module-level `config` mapping — verify.
        env_vars = config.get('env_vars')
        if not exists('~/haxclub'):
            with cd('~/'):
                run('git clone https://github.com/jsalva/haxclub')
        with cd('~/haxclub/haxclub'):
            if not exists('logs'):
                run('mkdir logs')
            run('git pull origin master')
            with shell_env(**env_vars):
                # expect/expecting/erun auto-answer interactive prompts
                # (e.g. syncdb's superuser question).
                prompts = []
                prompts += expect("Type 'yes' to continue","yes")
                with expecting(prompts):
                    erun('python manage.py collectstatic %s' % settings_file)
                    erun('python manage.py migrate %s' % settings_file)
                    erun('python manage.py syncdb %s' % settings_file)
                # Reload if supervisord is already running, otherwise start it.
                if exists('supervisord.pid'):
                    erun('python manage.py supervisor reload %s' % settings_file)
                else:
                    erun('python manage.py supervisor --daemonize %s' % settings_file)
        if not exists('/tmp/nginx'):
            run('mkdir /tmp/nginx')
        # Push web-server configuration and certificates.
        # NOTE(review): nginx_haxclub.conf is uploaded twice — confirm intent.
        put('nginx.conf','/etc/nginx/nginx.conf',use_sudo=True)
        put('nginx_haxclub.conf','/etc/nginx/conf.d/nginx_haxclub.conf',use_sudo=True)
        put('ssl/haxclub.key.nopass','/etc/ssl/certs/haxclub.key.nopass',use_sudo=True)
        put('ssl/haxclub.crt','/etc/ssl/certs/haxclub.crt',use_sudo=True)
        put('nginx_haxclub.conf','/etc/nginx/conf.d/nginx_haxclub.conf',use_sudo=True)
        sudo('service nginx stop; service nginx start;')
def setup_haproxy(debug=False): sudo('ufw allow 81') # nginx moved sudo('ufw allow 1936') # haproxy stats sudo('apt-get install -y haproxy') sudo('apt-get remove -y haproxy') with cd(env.VENDOR_PATH): run('wget http://haproxy.1wt.eu/download/1.5/src/devel/haproxy-1.5-dev17.tar.gz') run('tar -xf haproxy-1.5-dev17.tar.gz') with cd('haproxy-1.5-dev17'): run('make TARGET=linux2628 USE_PCRE=1 USE_OPENSSL=1 USE_ZLIB=1') sudo('make install') put('config/haproxy-init', '/etc/init.d/haproxy', use_sudo=True) sudo('chmod u+x /etc/init.d/haproxy') sudo('mkdir -p /etc/haproxy') if debug: put('config/debug_haproxy.conf', '/etc/haproxy/haproxy.cfg', use_sudo=True) else: put(os.path.join(env.SECRETS_PATH, 'configs/haproxy.conf'), '/etc/haproxy/haproxy.cfg', use_sudo=True) sudo('echo "ENABLED=1" > /etc/default/haproxy') cert_path = "%s/config/certificates" % env.NEWSBLUR_PATH run('cat %s/newsblur.com.crt > %s/newsblur.pem' % (cert_path, cert_path)) run('cat %s/newsblur.com.key >> %s/newsblur.pem' % (cert_path, cert_path)) put('config/haproxy_rsyslog.conf', '/etc/rsyslog.d/49-haproxy.conf', use_sudo=True) sudo('restart rsyslog') sudo('/etc/init.d/haproxy stop') sudo('/etc/init.d/haproxy start')
def deploy(branch='master'):
    # Deploy a demotime release: rotate the current release to "previous",
    # download and unpack the branch archive into a uuid-named build dir,
    # install requirements, relink demotime-current, and run Django setup.
    #
    # NOTE(review): `uuid` shadows the stdlib module name if `uuid` is imported
    # at file level (only `uuid4` is used here, so likely harmless).
    uuid = str(uuid4())
    with api.settings(warn_only=True):
        # Best-effort rotation; both commands may fail on first deploy.
        api.run('rm -f {}/demotime-previous'.format(REMOTE_ROOT))
        api.run('mv {}/demotime-current {}/demotime-previous'.format(
            REMOTE_ROOT, REMOTE_ROOT)
        )
    dir_name = ''
    package_url = 'https://github.com/f4nt/demotime/archive/{}.zip'.format(branch)
    # Place our code and install reqs
    with api.cd(REMOTE_BUILD_DIR):
        dir_name = 'demotime-{}'.format(uuid)
        api.run('wget {}'.format(package_url))
        api.run('unzip {}.zip'.format(branch))
        # GitHub archives unpack as demotime-<branch>; rename to the uuid dir.
        api.run('mv demotime-{} {}'.format(branch, dir_name))
        api.run('{}/bin/pip install -r {}/requirements.txt'.format(REMOTE_ROOT, dir_name))
        api.run('{}/bin/pip install -r {}/prod_requirements.txt'.format(REMOTE_ROOT, dir_name))
        # Uninstall and reinstall demotime
        api.run('{}/bin/pip uninstall demotime -y'.format(REMOTE_ROOT))
        api.run('{}/bin/pip install {}/demotime'.format(REMOTE_ROOT, dir_name))
        # Cleanup
        api.run('rm -f {}.zip'.format(branch))
    # Create our symlinks
    with api.cd(REMOTE_ROOT):
        api.run('ln -s {}/{} demotime-current'.format(REMOTE_BUILD_DIR, dir_name))
    # App setup stuff
    with api.cd(os.path.join(REMOTE_BUILD_DIR, dir_name, 'dt')):
        # NOTE(review): cwd already ends in .../dt, so 'cd dt' targets .../dt/dt —
        # confirm the repository layout actually nests dt/dt.
        api.run('cd dt && ln -s {}/prod_settings.py .'.format(REMOTE_ROOT))
        api.run('DT_PROD=true {}/bin/python manage.py collectstatic --noinput'.format(REMOTE_ROOT))
        api.run('DT_PROD=true {}/bin/python manage.py migrate'.format(REMOTE_ROOT))
    # Touching the ini makes uwsgi (in touch-reload mode) restart the app.
    api.run('touch --no-dereference /etc/uwsgi/sites/demotime.ini')
def setup_redis(slave=False):
    # Build redis from source, install init script plus master/slave config,
    # enable memory overcommit (required by redis bgsave), and start the service.
    #
    # slave: if True, install the slave config as /etc/redis_server.conf.
    redis_version = '2.6.16'
    with cd(env.VENDOR_PATH):
        run('wget http://download.redis.io/releases/redis-%s.tar.gz' % redis_version)
        run('tar -xzf redis-%s.tar.gz' % redis_version)
        run('rm redis-%s.tar.gz' % redis_version)
    with cd(os.path.join(env.VENDOR_PATH, 'redis-%s' % redis_version)):
        sudo('make install')
    put('config/redis-init', '/etc/init.d/redis', use_sudo=True)
    sudo('chmod u+x /etc/init.d/redis')
    put('config/redis.conf', '/etc/redis.conf', use_sudo=True)
    if slave:
        put('config/redis_slave.conf', '/etc/redis_server.conf', use_sudo=True)
    else:
        put('config/redis_master.conf', '/etc/redis_server.conf', use_sudo=True)
    # Earlier attempts at setting overcommit_memory, kept for reference:
    # sudo('chmod 666 /proc/sys/vm/overcommit_memory', pty=False)
    # run('echo "1" > /proc/sys/vm/overcommit_memory', pty=False)
    # sudo('chmod 644 /proc/sys/vm/overcommit_memory', pty=False)
    # Redirection must happen inside a root shell, hence the su wrapper.
    sudo("su root -c \"echo \\\"1\\\" > /proc/sys/vm/overcommit_memory\"")
    sudo("sysctl vm.overcommit_memory=1")
    sudo('mkdir -p /var/lib/redis')
    sudo('update-rc.d redis defaults')
    sudo('/etc/init.d/redis stop')
    sudo('/etc/init.d/redis start')
    setup_syncookies()
    config_monit_redis()
def deploy():
    # Tarball-based release deploy: archive local master, upload it, unpack it
    # into a timestamped releases/<release> dir, rotate the current/previous
    # symlinks, push settings.py, and restart.
    #
    # Uses env keys: path, release (set here), dbpath.
    env.release = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    run('mkdir -p {path}/releases {path}/packages'.format(**env))
    # Build the tarball locally, ship it, then remove the local copy.
    local('git archive --format=tar master | gzip > {release}.tar.gz'.format(**env))
    put('{release}.tar.gz'.format(**env), '{path}/packages/'.format(**env))
    local('rm -vf {release}.tar.gz'.format(**env))
    with cd(env.path):
        run('mkdir -p releases/{release}'.format(**env))
        with cd('releases/{release}'.format(**env)):
            run('tar xvf ../../packages/{release}.tar.gz'.format(**env))
            # Shared SQLite db lives outside the release dirs; link it in.
            run('ln -sf {dbpath} grouphugs.db'.format(**env))
        with cd('{path}/releases'.format(**env)):
            # warn_only: on the very first deploy there is no previous/current.
            with settings(warn_only=True):
                run('rm previous')
                run('mv current previous')
            run('ln -sf {release} current'.format(**env))
    put('settings.py', '{path}/releases/{release}/settings.py'.format(**env))
    restart()
def initialize_project(self):
    # Prepare each remote worker: kill stray celery processes, then create one
    # perfrunner checkout + virtualenv per (master, bucket) queue.
    for worker, master in zip(self.cluster_spec.workers,
                              self.cluster_spec.yield_masters()):
        state.env.host_string = worker
        run('killall -9 celery', quiet=True)
        for bucket in self.buckets:
            logger.info('Intializing remote worker environment')

            # Queue name: "<master-host>-<bucket>"; temp dir is suffixed with it.
            qname = '{}-{}'.format(master.split(':')[0], bucket)
            temp_dir = '{}-{}'.format(self.temp_dir, qname)

            r = run('test -d {}'.format(temp_dir), warn_only=True, quiet=True)
            if r.return_code == 0:
                if self.reuse_worker == 'true':
                    # NOTE(review): `return` exits the whole method, skipping
                    # any remaining buckets/workers — confirm `continue` was
                    # not intended.
                    return
                logger.error('Worker env exists, but reuse not specified')
                sys.exit(1)

            run('mkdir {}'.format(temp_dir))
            with cd(temp_dir):
                run('git clone {}'.format(REPO))
            with cd('{}/perfrunner'.format(temp_dir)):
                run('virtualenv -p python2.7 env')
                # ccache on PATH speeds up any C extension compilation.
                run('PATH=/usr/lib/ccache:/usr/lib64/ccache/bin:$PATH '
                    'env/bin/pip install '
                    '--download-cache /tmp/pip -r requirements.txt')
def deploy_web():
    """
    Installs the output of the build on the web instances.

    Unpacks the build archive, creates a fresh virtualenv, installs
    requirements under the deploy-time shell environment, runs Django
    setup, and links generated nginx/supervisor configs into place.
    (Python 2 module: uses the `print` statement.)
    """
    require("configuration")
    if exists(env.deploy_dir):
        run("rm -rf %s" % env.deploy_dir)
    run("tar -xvzf %s" % env.build_archive)
    run("mv %s deploy" % env.git_tag)
    run("source /usr/local/bin/virtualenvwrapper.sh && mkvirtualenv venv")
    # Environment exported to every command inside shell_env() below.
    env.SHELL_ENV = dict(
        DJANGO_SETTINGS_MODULE=env.django_settings_module,
        DJANGO_CONFIGURATION=env.django_configuration,
        CONFIG_HTTP_PORT=env.config_http_port,
        CONFIG_SERVER_NAME=env.config_server_name,
    )
    print env.SHELL_ENV
    with cd(env.deploy_dir):
        with prefix("source /usr/local/bin/virtualenvwrapper.sh && workon venv"), shell_env(**env.SHELL_ENV):
            requirements_path = "/".join(["codalab", "requirements", "dev_azure_nix.txt"])
            pip_cmd = "pip install -r {0}".format(requirements_path)
            run(pip_cmd)
            # additional requirements for bundle service
            run("pip install SQLAlchemy simplejson")
            with cd("codalab"):
                run("python manage.py config_gen")
                run("mkdir -p ~/.codalab && cp ./config/generated/bundle_server_config.json ~/.codalab/config.json")
                run("python manage.py syncdb --migrate")
                run("python scripts/initialize.py")
                run("python manage.py collectstatic --noinput")
                # Link generated service configs into nginx/supervisor.
                sudo("ln -sf `pwd`/config/generated/nginx.conf /etc/nginx/sites-enabled/codalab.conf")
                sudo("ln -sf `pwd`/config/generated/supervisor.conf /etc/supervisor/conf.d/codalab.conf")
def deploy():
    """Back up, hard-update the checkout to origin/master, refresh the Django
    app inside its virtualenv, then reinstall service configs and restart."""
    backup()
    with cd(env.directory):
        run('git checkout master')
        run('git fetch')
        run('git reset --hard origin/master')
        run("find . -name '*.pyc' -delete")
        virtualenv('pip install -r requirements.pip')
        # Standard Django housekeeping, in order.
        for management_args in ('collectstatic --noinput',
                                'syncdb',
                                'migrate',
                                'rebuild_index --noinput'):
            virtualenv('python manage.py %s' % management_args)
        # Copy across our supervisor configuration, as it may have changed.
        run('cp configuration/picky.conf /etc/supervisor/conf.d/')
    restart()
    with cd(env.directory):
        run('cp configuration/wiki.wilfred.me.uk /etc/nginx/sites-enabled/')
        sudo("service nginx reload", shell=False)
def _rgit_pull(name, gitsrc, add_pythonpath, branch=None):
    """
    Pull or clone the given branch of gitsrc into ~/cvs/`name`.

    If add_pythonpath is set, (re)link that subpath of the checkout into
    ~/cvs/PYTHONPATH so it becomes importable.
    """
    cvs_root = "~/cvs"  # remote-side path; don't expand locally
    repo_dir = os.path.join(cvs_root, name)

    # An existing checkout means pull; otherwise clone fresh.
    with settings(warn_only=True):
        have_checkout = run("test -d %s" % repo_dir).succeeded

    if have_checkout:
        with cd(repo_dir):
            run("git pull")
    else:
        run('mkdir -p %s' % cvs_root)
        with cd(cvs_root):
            run("git clone %s %s" % (gitsrc, name))

    with cd(repo_dir):
        # Only create a tracking branch when a different branch was requested.
        if branch is not None and branch != get_rgit_branch():
            run("pwd")
            run("git checkout -b %s origin/%s" % (branch, branch))

    if add_pythonpath:
        pythonpath_dir = os.path.join(cvs_root, 'PYTHONPATH')
        run('mkdir -p %s' % pythonpath_dir)
        link_target = os.path.join(pythonpath_dir, add_pythonpath)
        run('rm -f %s' % link_target)
        run('ln -s %s %s' % (os.path.join(cvs_root, name, add_pythonpath),
                             link_target))
def bootstrap():
    """ initialize remote host environment (virtualenv, deploy, update) """
    require('root', provided_by=env.deployments)
    print magenta("Cloning Repository")
    with cd(env.root):
        run("git clone %s" % env.git_repository)
    # some one time setup things
    with cd(env.project_root):
        if env.git_branch != 'master':
            run('git checkout %s' % (env.git_branch,))
        run('mkdir static')
        run('mkdir media')
    with cd(env.code_root):
        # Point settings.py at the environment-specific settings module.
        run('ln -sf settings_%s.py settings.py' % env.environment)
    # create virtualenv and install all the requirements
    execute('create_virtualenv')
    execute('update_requirements')
    execute('create_database')
    execute('syncdb')
    execute('migrate')
    print magenta("Load initial data")
    with cd(env.project_root), prefix('source env/bin/activate'):
        run('./manage.py loaddata allink_user.json')
    # only compile messages if locale folder is present
    # NOTE(review): os.path.isdir checks the *local* filesystem, not the remote
    # host — confirm that is intended.
    if os.path.isdir('locale'):
        execute('compilemessages')
    execute('collectstatic')
def install_galaxy_vcr(env):
    """
    Install the Galaxy VCR tools: clone the tool repo, register the tool
    section in tool_conf.xml (idempotently), and relax HTML sanitizing in
    universe_wsgi.ini so JBrowse/VICVB works.
    """
    with cd("~"):
        print("Installing galaxy VCR tools (python and xml scripts).")
        sudo("git clone git://github.com/JCVI-Cloud/galaxy-tools-vcr.git")
        sudo("cp -R galaxy-tools-vcr/tools/viral_assembly_annotation %s" % galaxy_VCR_path)
        sudo("chown -R galaxy:galaxy %s" % galaxy_VCR_path)
    with cd(galaxy_central):
        print("Adding VCR to tool_conf.xml.")
        tcx_file = "tool_conf.xml"
        _set_pre_VCR(tcx_file,"galaxy","galaxy")
        tcx_string = _get_file_string(tcx_file,galaxy_central)
        # BUG FIX: the header previously ended with a stray escaped quote
        # ('>\"'), which never matched the line echoed below, so the section
        # was appended again on every run.
        vcr_header = "<section name=\"Viral Assembly and Annotation\" id=\"viral_assembly_annotation\">"
        if (tcx_string.find(vcr_header) != -1):
            print("Galaxy VCR tools already included in tools_conf.xml!")
        else:
            # Drop the closing </toolbox>, append our section, then re-add it.
            sudo("sed -i '$d' %s/%s" % (galaxy_central,tcx_file))
            sudo("echo -e ' <section name=\"Viral Assembly and Annotation\" id=\"viral_assembly_annotation\">' >> %s" % tcx_file)
            sudo("echo -e ' <tool file=\"viral_assembly_annotation/viral_assembly.xml\" />' >> %s" % tcx_file)
            sudo("echo -e ' <tool file=\"viral_assembly_annotation/VIGOR.xml\" />' >> %s" % tcx_file)
            sudo("echo -e ' </section>' >> %s" % tcx_file)
            sudo("echo -e '</toolbox>' >> %s" % tcx_file)
        print("Adding 'sanitize_all_html = False' to universe_wsgi.ini to enable JBrowse for VICVB.")
        uwi_file = "universe_wsgi.ini"
        _set_pre_VCR(uwi_file,"galaxy","galaxy")
        uwi_string = _get_file_string(uwi_file,galaxy_central)
        if (uwi_string.find("sanitize_all_html") != -1):
            print("Setting sanitize_all_html in %s to False." % uwi_file)
            sudo("sed -i '/^sanitize_all_html/c\sanitize_all_html = False' %s" % uwi_file)
        else:
            print("No sanitize_all_html present! Adding...")
            sudo("sed -i '/^\[app:main\]/a\\\nsanitize_all_html = False' %s" % uwi_file)
def setup_celery_backend(rds_host, user_key, user_secret):
    ''' The real configuration happens here.

    Provisions the celery backend box: OS packages, the vulnweb source
    (cloned over SSH using a deploy key), its Python requirements, and
    generated broker/database/supervisor configs.

    :param rds_host: hostname of the RDS MySQL instance.
    :param user_key: AWS access key written into the broker config.
    :param user_secret: AWS secret key written into the broker config.
    '''
    logging.info('Updating Ubuntu\'s repository index')
    sudo('apt-get update')
    # Not sure why, but sometimes I get "E: Unable to locate package git"
    # trying to solve this with a sleep.
    time.sleep(2)
    sudo('apt-get update')

    logging.info('Installing ubuntu packages')
    for pkg in ['git', 'python-pip', 'joe', 'python-mysqldb', 'supervisor']:
        sudo('apt-get install -y -q %s' % pkg)

    logging.info('Getting celery application source code')
    with cd('/tmp/'):
        # Pre-trust github.com and install the deploy key so the clone is
        # non-interactive.
        sudo('ssh-keyscan -H github.com > /root/.ssh/known_hosts')
        put(DEPLOY_PRIVATE_PATH, '/root/.ssh/id_rsa', use_sudo=True)
        put(DEPLOY_PUBLIC_PATH, '/root/.ssh/id_rsa.pub', use_sudo=True)
        sudo('chmod 600 /root/.ssh/id_rsa')
        sudo('chmod 600 /root/.ssh/id_rsa.pub')
        sudo('git clone %s' % VULNWEB_REPO)

    logging.info('Installing requirements.txt (this takes time!)')
    with cd('/tmp/nimbostratus-target/'):
        sudo('git checkout %s' % VULNWEB_BRANCH)
        sudo('pip install --use-mirrors --upgrade -r requirements.txt')

    vulnweb_root = '/tmp/nimbostratus-target/servers/django_frontend/vulnweb'
    logging.info('Configuring django-celery application')
    # Overwrite the application configuration files
    upload_template('servers/celery_backend/broker.config',
                    '%s/vulnweb/broker.py' % vulnweb_root,
                    context={'access': user_key, 'secret': user_secret},
                    backup=False, use_sudo=True)
    upload_template('servers/celery_backend/databases.config',
                    '%s/vulnweb/databases.py' % vulnweb_root,
                    context={'user': LOW_PRIV_USER,
                             'password': LOW_PRIV_PASSWORD,
                             'host': rds_host},
                    backup=False, use_sudo=True)
    upload_template('servers/celery_backend/supervisor.config',
                    '/etc/supervisor/conf.d/celery.conf',
                    context={'django_root_path': vulnweb_root},
                    backup=False, use_sudo=True)
    # Pick up the new supervisor program definition.
    sudo('supervisorctl update')
    with cd(vulnweb_root):
        sudo('python manage.py syncdb --noinput')
def vcs_upload():
    """
    Uploads the project with the selected VCS tool.

    git: force-push local master to a bare remote repo, then check it out
    into the project work tree. hg: push and update, aborting on a real
    push failure (return code 255).
    """
    if env.deploy_tool == "git":
        remote_path = "ssh://%s@%s%s" % (env.user, env.host_string, env.repo_path)
        if not exists(env.repo_path):
            run("mkdir -p %s" % env.repo_path)
            with cd(env.repo_path):
                run("git init --bare")
        local("git push -f %s master" % remote_path)
        with cd(env.repo_path):
            run("GIT_WORK_TREE=%s git checkout -f master" % env.proj_path)
            run("GIT_WORK_TREE=%s git reset --hard" % env.proj_path)
    elif env.deploy_tool == "hg":
        remote_path = "ssh://%s@%s/%s" % (env.user, env.host_string, env.repo_path)
        with cd(env.repo_path):
            if not exists("%s/.hg" % env.repo_path):
                run("hg init")
                print(env.repo_path)
            with fab_settings(warn_only=True):
                push = local("hg push -f %s" % remote_path)
                # 255 is a genuine error; 1 merely means "no changes to push".
                if push.return_code == 255:
                    # BUG FIX: abort() requires a message argument — the bare
                    # call raised TypeError instead of aborting cleanly.
                    abort("hg push to %s failed" % remote_path)
            run("hg update")
def rollback():
    """
    Reverts project state to the last deploy.

    When a deploy is performed, the current state of the project is
    backed up. This includes the project files, the database, and all static
    files. Calling rollback will revert all of these to their state prior to
    the last deploy.
    """
    with update_changed_requirements():
        if env.deploy_tool in env.vcs_tools:
            # VCS deploys: wind the work tree back to the recorded commit.
            with cd(env.repo_path):
                if env.deploy_tool == "git":
                    run("GIT_WORK_TREE={0} git checkout -f "
                        "`cat {0}/last.commit`".format(env.proj_path))
                elif env.deploy_tool == "hg":
                    run("hg update -C `cat last.commit`")
            with project():
                # Restore the static files snapshot taken at deploy time.
                with cd(join(static(), "..")):
                    run("tar -xf %s/static.tar" % env.proj_path)
        else:
            # rsync deploys: replace the whole project dir from the tarball.
            with cd(env.proj_path.rsplit("/", 1)[0]):
                run("rm -rf %s" % env.proj_name)
                run("tar -xf %s.tar" % env.proj_name)
    with cd(env.proj_path):
        restore("last.db")
    restart()
def mac_setup_virtualenv():
    """Upload virtualenv 1.8.4 to the Mac build host and install it under
    the relative ../usr prefix."""
    tarball = "virtualenv-1.8.4.tar.gz"
    with cd(mac_tmp), prefix(mac_prefix):
        put(tarball, ".")
        run("tar xzf " + tarball)
        with cd("virtualenv-1.8.4"):
            run("python setup.py install --prefix=../usr")
def deploy(revision, first=False):
    '''
    Make the application deploy.

    Uploads the given revision, flips the "current" symlink, links
    nginx/supervisor configs and admin static files, installs dependencies,
    runs migrations (plus initial DB setup when first=True), and restarts
    the services.

    Example:
        fab production deploy:1.2
    '''
    env.user = DEPLOY_USER
    project_dir = os.path.join(env.ENV_APPS, APP_NAME)
    current_dir = os.path.join(project_dir, "current")
    current_static = os.path.join(current_dir, APP_NAME)
    current_static = os.path.join(current_static, "static")
    release_dir = _upload_source(revision, project_dir)
    with cd(project_dir):
        # Atomically-enough repoint "current" at the new release.
        run('rm -rf current')
        run('ln -s %s current' % release_dir)
    with cd('/etc/nginx/sites-enabled/'):
        sudo('ln -sf %s/env/app.vhost %s.vhost' % (release_dir, APP_NAME))
    with cd('/etc/supervisor/conf.d'):
        sudo('ln -sf %s/env/app_wsgi.conf %s.conf' % (release_dir, APP_NAME))
    with cd(current_static):
        # TODO It would be nice configure this
        # BUG FIX: dropped the redundant inner 'sudo' (the command already runs
        # via sudo()) and use -sf so re-deploys don't fail on the existing link.
        sudo('ln -sf /usr/local/lib/python2.7/dist-packages/django/contrib/admin/static/admin/ admin')
    with cd(release_dir):
        run("make update_deps")
        if first:
            # One-time database bootstrap for brand-new environments.
            run("make server_dbinitial")
        run("make migrate_no_input")
    run('sudo service nginx restart')
    run('sudo supervisorctl reload')
def mac_setup_numpy():
    """Clone numpy into mac_tmp and check out the 1.9.x maintenance branch,
    keeping submodules in sync."""
    with cd(mac_tmp):
        run("git clone https://github.com/numpy/numpy")
        with cd("numpy"):
            # Submodules are initialised before switching branches, then
            # updated to match the checked-out branch.
            for git_command in ("git submodule init",
                                "git checkout -t origin/maintenance/1.9.x",
                                "git submodule update"):
                run(git_command)
def mac_setup_paver():
    """Upload Paver 1.2.2 to the Mac build host and install it under the
    relative ../usr prefix."""
    tarball = "Paver-1.2.2.tar.gz"
    with cd(mac_tmp), prefix(mac_prefix):
        put(tarball, ".")
        run("tar xzf " + tarball)
        with cd("Paver-1.2.2"):
            run("python setup.py install --prefix=../usr")
def deploy(deployment_name, branch='master'):
    # Deploy `branch` to the named deployment: update the checkout, clean
    # stale bytecode, install requirements into the virtualenv, run Django
    # management commands, then restart celery and reload uwsgi.
    setup_env(deployment_name)
    with cd(env.code_src):
        run("git fetch origin")
        run("git checkout origin/%s" % branch)
        deploy_template(env)
        # Remove compiled bytecode and any directories left empty by it.
        run('find . -name "*.pyc" -exec rm -rf {} \;')
        run('find . -type d -empty -delete')
    # numpy pip install from requirements file fails
    with source(env.virtualenv):
        run("pip install numpy")
        run("pip install -r %s" % env.pip_requirements_file)
    with cd(env.code_src):
        config_module = env.django_config_module
        local_settings_check(config_module)
        with source(env.virtualenv):
            run("python manage.py syncdb --settings=%s" % config_module)
            run("python manage.py migrate --settings=%s" % config_module)
            run("python manage.py collectstatic --settings=%s --noinput" % config_module)
    run("sudo %s restart" % env.celeryd)
    run("sudo /usr/local/bin/uwsgi --reload %s" % env.pid)
def download_janus_vim_conf():
    """
    Fetch Janus vimrc overlays, clone extra vim plugins into ~/.janus, and
    install the powerline font used by vim-airline.
    """
    sudo("pip install flake8 pylint")  # To support python syntastic
    url = "https://raw.githubusercontent.com/" \
          "ibotdotout/.devenv/master/janus/.vimrc.after"
    _wget(url)
    url = "https://raw.githubusercontent.com/" \
          "ibotdotout/.devenv/master/janus/.vimrc.before"
    _wget(url)
    url = "https://raw.githubusercontent.com/" \
          "ibotdotout/.devenv/master/vim/.vimrc.local"
    _wget(url)

    run("mkdir -p ~/.janus")
    with cd('~/.janus'):
        run("git clone "
            "https://github.com/altercation/vim-colors-solarized.git")
        run("git clone https://github.com/zhaocai/GoldenView.Vim.git")
        run("git clone https://github.com/myusuf3/numbers.vim.git")
        run("git clone https://github.com/bling/vim-airline.git")

    # install powerline font for vim-airline
    # BUG FIX: ~/.fonts is not guaranteed to exist on a fresh host, and
    # cd() into a missing directory fails — create it first.
    run("mkdir -p ~/.fonts")
    with cd('~/.fonts'):
        run("wget https://raw.githubusercontent.com/powerline/fonts/"
            "master/Meslo/Meslo%20LG%20M%20Regular%20for%20Powerline.otf")
        run("fc-cache -vf ~/.fonts/")
def install_hyphy(env):
    # Build HyPhy from an SVN checkout: assemble a build tree by copying
    # sources into place, run the bundled build script, and move the result
    # into env.system_install.
    version = env.tool_version
    url = 'http://www.datam0nk3y.org/svn/hyphy'
    install_dir = env.system_install
    # Commands that touch install_dir may need root, depending on env.use_sudo.
    install_cmd = sudo if env.use_sudo else run
    if not exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            run("svn co -r %s %s src" % (version, url))
            run("mkdir -p build/Source/Link")
            run("mkdir build/Source/SQLite")
            # Brace expansion below assumes a bash-like remote shell.
            run("cp src/trunk/Core/*.{h,cp,cpp} build/Source")
            run("cp src/trunk/HeadlessLink/*.{h,cpp} build/Source/SQLite")
            run("cp src/trunk/NewerFunctionality/*.{h,cpp} build/Source/")
            run("cp src/SQLite/trunk/*.{c,h} build/Source/SQLite/")
            run("cp src/trunk/Scripts/*.sh build/")
            run("cp src/trunk/Mains/main-unix.cpp build/Source/main-unix.cxx")
            run("cp src/trunk/Mains/hyphyunixutils.cpp build/Source/hyphyunixutils.cpp")
            run("cp -R src/trunk/{ChartAddIns,DatapanelAddIns,GeneticCodes,Help,SubstitutionClasses,SubstitutionModels,TemplateBatchFiles,TopologyInference,TreeAddIns,UserAddins} build")
            run("rm build/Source/preferences.cpp")
            with cd("build"):
                # "SP" selects the single-processor build variant.
                run("bash build.sh SP")
            install_cmd("mv build/* %s" % install_dir)
    _update_default(env, install_dir)
def update_web():
    """
    This is how code gets reloaded:

    - Checkout code on the auxiliary server ADMIN whost
    - Checkout the latest code on all appservers
    - Remove all pyc files from app servers
    - Bounce celeryd, memcached , test services
    - Reload app code (touch wsgi file)

    Until we implement the checking out code to an isolated dir
    any failure on these steps need to be fixed or will result in breakage
    """
    # Update the admin host first (if configured).
    if env.admin_dir is not None:
        env.host_string = ADMIN_HOST
        with cd(os.path.join(env.admin_dir, 'unisubs')):
            _git_pull()
    # Pull latest code and clear bytecode on every app server.
    for host in env.web_hosts:
        env.host_string = host
        with cd('{0}/unisubs'.format(env.web_dir)):
            # NOTE(review): python_exe is computed but never used here.
            python_exe = '{0}/env/bin/python'.format(env.web_dir)
            _git_pull()
            with settings(warn_only=True):
                run("find . -name '*.pyc' -print0 | xargs -0 rm")
    _bounce_celeryd()
    bounce_memcached()
    test_services()
    # Finally reload the app server processes on every host.
    for host in env.web_hosts:
        env.host_string = host
        _reload_app_server()
def bootstrap_swan():
    """
    Bootstrap the Swan toolchain: clone and build LLVM/Clang, build the Swan
    runtime against the fresh clang, then clone and run the Swan test suite.
    Clones are skipped when the target directories already exist.
    """
    # BUG FIX: was 'mkdir -pp' (doubled flag typo); '-p' is the intended option.
    run('mkdir -p %s' % SWAN_HOME)
    with cd(SWAN_HOME):
        if (not exists(os.path.join(SWAN_HOME, 'llvm'))):
            run("git clone %s llvm" % SWAN_LLVM_REPO)
        if (not exists(os.path.join(SWAN_HOME, 'llvm/tools/clang'))):
            run("git clone %s llvm/tools/clang" % SWAN_CLANG_REPO)
        # Out-of-tree LLVM build in $SWAN_HOME/build.
        run('mkdir -p build')
        with cd('build'):
            run('cmake -G "Unix Makefiles" ../llvm')
            run('make clean')
            run('make')
        test_clang()
        if (not exists(os.path.join(SWAN_HOME, 'swan_runtime'))):
            run("git clone %s" % SWAN_RT_REPO)
        with cd('swan_runtime'):
            # Regenerate the autotools build system, then build with the
            # just-built clang/clang++.
            run("libtoolize")
            run("aclocal")
            run("automake --add-missing")
            run("autoconf")
            run("./configure --prefix=%s/swan_runtime/lib CC=../build/bin/clang CXX=../build/bin/clang++" % SWAN_HOME)
            run("make clean")
            run("make")
        run("git clone https://github.com/project-asap/swan_tests.git")
        with cd("swan_tests"):
            run("make CXX=../build/bin/clang++ SWANRTDIR=../swan_runtime test")
def collect_remote_statics(name=None):
    """
    Add leaflet and leaflet.draw in a repository watched by collectstatic.

    name: when given, only the matching sub-repository is processed.
    Repository specs are "url@branch" with an optional "#ref" suffix.
    """
    remote_static_dir = '{project_dir}/{project_name}/remote_static'.format(**env)
    run_as_umap('mkdir -p {0}'.format(remote_static_dir))
    remote_repositories = {
        'storage': 'git://github.com/yohanboniface/Leaflet.Storage.git@master',
    }
    with cd(remote_static_dir):
        for subdir, path in remote_repositories.iteritems():
            if name and name != subdir:
                continue
            repository, branch = path.split('@')
            # "branch#ref": check out `ref` after cloning; otherwise ref==branch.
            if "#" in branch:
                branch, ref = branch.split('#')
            else:
                ref = branch
            with hide("running", "stdout"):
                # NOTE(review): this local name shadows fabric's `exists`
                # helper for the rest of the loop body.
                exists = run_as_umap('if [ -d "{0}" ]; then echo 1; fi'.format(subdir))
            if exists:
                # Existing clone: switch to the branch and fast-forward it.
                with cd(subdir):
                    run_as_umap('git checkout {0}'.format(branch))
                    run_as_umap('git pull origin {0} --tags'.format(branch))
            else:
                run_as_umap('git clone {0} {1}'.format(repository, subdir))
                with cd(subdir):
                    run_as_umap('git checkout {0}'.format(ref))
            # leaflet needs an npm build step after checkout.
            if subdir == "leaflet":
                run_as_umap('npm install')
                run_as_umap('jake build')
def install_hbase():
    '''
    Install HBase 0.98.15 under /usr/local/lib and write a pseudo-distributed
    hbase-site.xml (rootdir on local HDFS, distributed mode on).

    Reference: http://hbase.apache.org/book.html#quickstart
    '''
    if not exists("/usr/local/lib/hbase-0.98.15-hadoop2"):
        with cd('/usr/local/lib'):
            if not exists("hbase-0.98.15-hadoop2-bin.tar.gz"):
                sudo("wget http://www.apache.org/dist/hbase/0.98.15/hbase-0.98.15-hadoop2-bin.tar.gz")
            sudo("tar -xvf hbase-0.98.15-hadoop2-bin.tar.gz")
            # Stable path for configs/scripts regardless of version.
            sudo("ln -s hbase-0.98.15-hadoop2 hbase")
        with cd("/usr/local/lib/hbase/conf"):
            hbase_site_xml_content = """
<configuration>
    <property>
        <name>hbase.rootdir</name>
        <value>hdfs://localhost:9000/hbase</value>
    </property>
    <property>
        <name>hbase.zookeeper.property.dataDir</name>
        <value>/home/hadoop/zookeeper</value>
    </property>
    <property>
        <name>hbase.cluster.distributed</name>
        <value>true</value>
    </property>
</configuration>
"""
            _replace_file_content("hbase-site.xml", hbase_site_xml_content)
        with cd('/usr/local/lib'):
            # HBase runs as the hadoop user; hand it the install tree.
            sudo("chown hadoop -R hbase-0.98.15-hadoop2")
            sudo("chmod -R u+rw hbase-0.98.15-hadoop2")
def install_wmt():
    """Clone WMT under ASAP_HOME and build it with npm/grunt, unless it is
    already installed."""
    if exists(WMT_HOME):
        return
    with cd(ASAP_HOME):
        run("git clone %s" % WMT_REPO)
    with cd(WMT_HOME):
        run("npm install")
        run("grunt")
def ubuntu_install_cloudfoundry(master, *args, **kwargs):
    """
    Install Cloud Foundry tooling on Ubuntu: ensure go and bosh are present,
    then set up a deployments/cf-example bundle with the bosh AWS plugin.
    """
    # DEPS, TODO: @depends(['go', 'bosh', 'vagrant'])
    command = 'go'
    if cmd_avail(command):
        local('echo {command} is already installed'.format(command=command))
    else:
        install_go()
    command = 'bosh'
    if cmd_avail(command):
        local('echo {command} is already installed'.format(command=command))
    else:
        ubuntu_actually_install_bosh(master)
    sudo('gem install bundler')
    loc_0 = 'deployments'
    run('mkdir {loc}'.format(loc=loc_0))
    with cd(loc_0):
        loc_1 = 'cf-example'
        # BUG FIX: previously created loc_0 ('deployments') again here instead
        # of the nested cf-example directory.
        run('mkdir {loc}'.format(loc=loc_1))
        with cd(loc_1):
            run('touch Gemfile')
            # BUG FIX: the Gemfile content used to be passed straight to run(),
            # which executed it as shell commands; write it into the Gemfile
            # via a heredoc instead.
            run("cat > Gemfile <<'EOF'\n"
                "source 'https://rubygems.org'\n"
                'ruby "1.9.3"\n'
                'gem "bosh_cli_plugin_aws"\n'
                "EOF")
            run('bundle install')
def update():
    u"""Function defining all steps required to properly update application."""
    base = '/var/www/volontuloapp_org'

    # Refresh the Django application checkout and its dependencies.
    with cd(base + '/volontulo'), prefix('workon volontulo'):
        run('git checkout master')
        run('git pull')
        run('pip install -r requirements.txt')

    # Rebuild the gulp-managed frontend assets.
    with cd(base + '/volontulo/volontulo'):
        run('npm install .')
        run('./node_modules/gulp/bin/gulp.js build')

    # Apply migrations and restart the site.
    with cd(base), prefix('workon volontulo'):
        run('python manage.py migrate --traceback')
        run('service apache2 restart')
def git_website():
    """Clone the site repository if absent; otherwise force the existing
    checkout onto env.branch and pull the latest changes."""
    if exists(env.project_directory):
        # Existing checkout: fetch, force onto the target branch, and pull.
        with cd(env.project_directory):
            run("git remote update")
            run("git checkout %s" % env.branch)
            # Discard any local edits and untracked files so the pull
            # cannot conflict.
            run("git reset --hard HEAD")
            run("git clean -f")
            run("git checkout %s" % env.branch)
            run("git pull %s %s" % (env.repository, env.branch))
    else:
        # TODO: do a more proper clone, so it doesnt say x commits ahead of origin/2.0
        # TODO: set up known_hosts before cloning to bypass key/security prompt
        with cd(env.project_root):
            run("git clone %s" % env.repository)
        with cd(env.project_directory):
            run("git checkout %s" % env.branch)
def updb():
    """Create a new Flask-Migrate revision and apply it, inside the app's
    virtualenv under /home/hxp."""
    with cd("/home/hxp"), prefix(env.activate):
        run("python manage.py db migrate -m 'upgrade'")
        run("python manage.py db upgrade")
def create():
    """
    Creates the environment needed to host the project.
    The environment consists of: system locales, virtualenv, database, project
    files, SSL certificate, and project-specific Python requirements.
    """
    # Generate project locale
    locale = env.locale.replace("UTF-8", "utf8")
    with hide("stdout"):
        if locale not in run("locale -a"):
            sudo("locale-gen %s" % env.locale)
            sudo("update-locale %s" % env.locale)
            # Postgres must restart to pick up the new locale.
            sudo("service postgresql restart")
            run("exit")

    # Create project path
    run("mkdir -p %s" % env.proj_path)

    # Set up virtual env
    run("mkdir -p %s" % env.venv_home)
    with cd(env.venv_home):
        if exists(env.proj_name):
            if confirm("Virtualenv already exists in host server: %s"
                       "\nWould you like to replace it?" % env.proj_name):
                run("rm -rf %s" % env.proj_name)
            else:
                # NOTE(review): abort() requires a message argument in fabric;
                # this bare call raises TypeError rather than aborting cleanly.
                abort()
        run("virtualenv %s" % env.proj_name)

    # Upload project files
    if env.deploy_tool in env.vcs_tools:
        vcs_upload()
    else:
        rsync_upload()

    # Create DB and DB user
    pw = db_pass()
    # NOTE(review): "\'" is just "'" — this replace does NOT escape single
    # quotes for SQL; "''" (doubled quote) would be the correct escape.
    user_sql_args = (env.proj_name, pw.replace("'", "\'"))
    user_sql = "CREATE USER %s WITH ENCRYPTED PASSWORD '%s';" % user_sql_args
    psql(user_sql, show=False)
    # Echo the statement with the password masked out.
    shadowed = "*" * len(pw)
    print_command(user_sql.replace("'%s'" % pw, "'%s'" % shadowed))
    psql("CREATE DATABASE %s WITH OWNER %s ENCODING = 'UTF8' "
         "LC_CTYPE = '%s' LC_COLLATE = '%s' TEMPLATE template0;" %
         (env.proj_name, env.proj_name, env.locale, env.locale))

    # Set up SSL certificate
    if not env.ssl_disabled:
        conf_path = "/etc/nginx/conf"
        if not exists(conf_path):
            sudo("mkdir %s" % conf_path)
        with cd(conf_path):
            crt_file = env.proj_name + ".crt"
            key_file = env.proj_name + ".key"
            if not exists(crt_file) and not exists(key_file):
                try:
                    # Use a local cert/key pair from deploy/ if exactly one
                    # of each exists; otherwise self-sign below.
                    crt_local, = glob(join("deploy", "*.crt"))
                    key_local, = glob(join("deploy", "*.key"))
                except ValueError:
                    parts = (crt_file, key_file, env.domains[0])
                    sudo("openssl req -new -x509 -nodes -out %s -keyout %s "
                         "-subj '/CN=%s' -days 3650" % parts)
                else:
                    upload_template(crt_local, crt_file, use_sudo=True)
                    upload_template(key_local, key_file, use_sudo=True)

    # Install project-specific requirements
    upload_template_and_reload("settings")
    with project():
        if env.reqs_path:
            pip("-r %s/%s" % (env.proj_path, env.reqs_path))
        pip("gunicorn setproctitle psycopg2 "
            "django-compressor python-memcached")
        # Bootstrap the DB
        manage("createdb --noinput --nodata")
        # Point the default Site at the primary domain, then make sure a Site
        # row exists for every configured domain.
        python("from django.conf import settings;"
               "from django.contrib.sites.models import Site;"
               "Site.objects.filter(id=settings.SITE_ID).update(domain='%s');"
               % env.domains[0])
        for domain in env.domains:
            python("from django.contrib.sites.models import Site;"
                   "Site.objects.get_or_create(domain='%s');" % domain)
        if env.admin_pass:
            pw = env.admin_pass
            user_py = ("from django.contrib.auth import get_user_model;"
                       "User = get_user_model();"
                       "u, _ = User.objects.get_or_create(username='******');"
                       "u.is_staff = u.is_superuser = True;"
                       "u.set_password('%s');"
                       "u.save();" % pw)
            python(user_py, show=False)
            # Echo the snippet with the password masked out.
            shadowed = "*" * len(pw)
            print_command(user_py.replace("'%s'" % pw, "'%s'" % shadowed))

    return True
def fab_create_ftp_folder(ftp_request, ceph_obj_list_by_data_class, srs_epsg=None):
    """
    Creates an FTP folder for the requested tile data set.

    Downloads the requested Ceph objects into per-data-class
    subdirectories (optionally reprojected to ``srs_epsg``; LAZ is never
    reprojected), mails the user on success or failure, and records the
    final status on ``ftp_request`` (always saved in ``finally``).

    :param ftp_request: FTP request object providing ``user``, ``name``
        and a writable ``status`` attribute
    :param ceph_obj_list_by_data_class: dict mapping data class name to a
        list of Ceph objects to download
    :param srs_epsg: optional EPSG code to reproject non-LAZ data to
    :returns: a human-readable "SUCCESS: ..." or "ERROR: ..." string
    """
    username = ftp_request.user.username
    request_name = ftp_request.name
    user_email = [ftp_request.user.email.encode('utf8'), ]
    try:
        top_dir, ftp_dir = get_folders_for_user(ftp_request.user, request_name)
        python_path = settings.CEPHACCESS_PYTHON
        dl_script_path = settings.CEPHACCESS_DL_SCRIPT

        # Check for toplevel dir
        result = run("[ -d {0} ]".format(top_dir))
        if result.return_code == 1:
            logger.error("FTP Task Error: No toplevel directory was found.")
            # NOTE(review): DUPLICATE looks wrong for a missing toplevel
            # directory -- FTPStatus.ERROR seems intended; confirm before
            # changing, as downstream code may key off this status.
            ftp_request.status = FTPStatus.DUPLICATE
            mail_msg = """\
An error was encountered on your data request named [{0}] for user [{1}].
No top level directory was found. Please forward this email to
[{2}]""".format(
                request_name, username, settings.FTP_SUPPORT_MAIL,)
            mail_ftp_user(username, user_email, request_name, mail_msg)
            return "ERROR: No top level directory was found."

        # Check for duplicate folders
        result = run("[ -d {0} ]".format(ftp_dir))
        if result.return_code == 0:
            logger.error(
                "FTP Task Error: A duplicate FTP request toplevel directory \
was found.")
            ftp_request.status = FTPStatus.DUPLICATE
            mail_msg = """\
An error was encountered on your data request named [{0}] for user [{1}].
A duplicate FTP request toplevel directory was found.
Please wait 5 minutes in between submitting FTP requests and creating FTP
folders.
If error still persists, forward this email to
[{2}]""".format(
                request_name, username, settings.FTP_SUPPORT_MAIL,)
            mail_ftp_user(username, user_email, request_name, mail_msg)
            return "ERROR: A duplicate FTP request toplevel directory was \
found."

        # Create toplevel directory for this FTP request.
        # (was `result.return_code is 0`: identity comparison with an int
        # literal only works via CPython small-int caching -- use equality)
        result = run("mkdir -p {0}".format(ftp_dir))
        if result.return_code == 0:
            with cd(ftp_dir):
                ln = run('ln -sf ../../../../../FAQ.txt ./')
                if ln.return_code != 0:
                    logger.error('UNABLE TO CREATE SYMLINK FOR FAQ.txt')
                else:
                    logger.info('SYMLINK CREATED')

                for data_class, ceph_obj_list \
                        in ceph_obj_list_by_data_class.iteritems():
                    type_dir = data_class.replace(" ", "_")

                    # Projection path folders
                    utm_51n_dir = os.path.join("UTM_51N", type_dir)
                    reprojected_dir = ""
                    if srs_epsg is not None:
                        reprojected_dir = os.path.join(
                            "EPSG-" + str(srs_epsg), type_dir)

                    if srs_epsg is not None:
                        if data_class == 'LAZ':  # Do not reproject LAZ
                            result = run("mkdir -p {0}".format(utm_51n_dir))
                        else:
                            # Create a directory for each geo-type
                            result = run(
                                "mkdir -p {0}".format(reprojected_dir))
                    else:
                        # Create a directory for each geo-type
                        result = run("mkdir -p {0}".format(utm_51n_dir))

                    if result.return_code != 0:  # Handle error
                        logger.error(
                            "Error on FTP request: Failed to create data class\
 subdirectory at [{0}]. Please notify the administrator of this error".format(
                                ftp_dir))
                        ftp_request.status = FTPStatus.ERROR
                        mail_msg = """\
An error was encountered on your data request named [{0}] for user [{1}].
The system failed to create an dataclass subdirectory inside the FTP folder
at location [{2}].
Please forward this email to ({3}) so that we can address this issue.

---RESULT TRACE---

{4}""".format(request_name, username, os.path.join(ftp_dir, type_dir),
              settings.FTP_SUPPORT_MAIL, result,)
                        mail_ftp_user(username, user_email, request_name,
                                      mail_msg)
                        return "ERROR: Failed to create data class subdirectory\
 [{0}].".format(os.path.join(ftp_dir, type_dir))

                    obj_dl_list = " ".join(map(str, ceph_obj_list))
                    if srs_epsg is not None:
                        if data_class == 'LAZ':
                            # Download list of objects in corresponding
                            # geo-type folder
                            result = run("{0} {1} -d={2} {3}".format(
                                python_path, dl_script_path,
                                os.path.join(ftp_dir, utm_51n_dir),
                                obj_dl_list))
                        else:
                            # Download list of objects in corresponding
                            # geo-type folder
                            result = run("{0} {1} -d={2} -p={3} {4}".format(
                                python_path, dl_script_path,
                                os.path.join(ftp_dir, reprojected_dir),
                                srs_epsg, obj_dl_list))
                    else:
                        # Download list of objects in corresponding geo-type
                        # folder
                        result = run("{0} {1} -d={2} {3}".format(
                            python_path, dl_script_path, os.path.join(
                                ftp_dir, utm_51n_dir), obj_dl_list))
                    upload_xml(os.path.join(ftp_dir, utm_51n_dir), obj_dl_list)

                    if result.return_code != 0:  # Handle error
                        logger.error(
                            "Error on FTP request: Failed to download file/s \
for dataclass [{0}].".format(data_class))
                        ftp_request.status = FTPStatus.ERROR
                        mail_msg = """\
Cannot access Ceph Data Store. An error was encountered on your data request \
named [{0}] for user [{1}].
The system failed to download the following files: [{2}]. Either the file/s \
do/es not exist, or the Ceph
Data Storage is down. Please forward this email to ({3}) that we \
can address this issue..

---RESULT TRACE---

{4}""".format(request_name, username, obj_dl_list,
              settings.FTP_SUPPORT_MAIL, result,)
                        mail_ftp_user(username, user_email, request_name,
                                      mail_msg)
                        return "ERROR: Failed to create folder [{0}].".format(
                            ftp_dir)
        else:
            # Toplevel mkdir failed -- likely a permissions problem
            logger.error(
                "Error on FTP request: Failed to create FTP folder at [{0}].\
 Please notify the administrator of this error".format(ftp_dir))
            ftp_request.status = FTPStatus.ERROR
            mail_msg = """\
An error was encountered on your data request named [{0}] for user [{1}].
The system failed to create the toplevel FTP directory at location [{2}].
Please ensure that you are a legitimate user and have permission to use this
FTP service. If you are a legitimate user, please e-mail the system
administrator ({3}) regarding this error.

---RESULT TRACE---

{4}""".format(request_name, username, ftp_dir,
              settings.FTP_SUPPORT_MAIL, result,)
            mail_ftp_user(username, user_email, request_name, mail_msg)
            return "ERROR: Failed to create folder [{0}].".format(ftp_dir)

        # email user once the files have been downloaded
        logger.info("FTP request has been completed for user [{0}]. Requested \
data is found under the DL directory path [{1}]".format(
            username, os.path.join("DL", request_name)))
        ftp_request.status = FTPStatus.DONE
        mail_msg = """\
Data request named [{0}] for user [{1}] has been succesfully processed. With
your LiPAD username and password, please login with an FTPES client like
Filezilla, to ftpes://ftp.dream.upd.edu.ph.
Your requested datasets will be in a new folder named [{0}] under the
directory [DL/DAD/] and will be \
available for 30 days only due to infrastructure limitations.

FTP Server: ftpes://ftp.dream.upd.edu.ph/
Folder location: /mnt/ftp_pool/FTP/Others/{1}/DL/DAD/lipad_requests/{0}
Encryption: Require explicit FTP over TLS
Logon Type: Normal
Username: {1}
Password: [your LiPAD password]

Please refer to the FTP access instructions [https://lipad.dream.upd.edu.ph/hel\
p/#download-ftp] for further information.

For issues encountered, please email {2}\
""".format(request_name, username, settings.FTP_SUPPORT_MAIL)
        mail_ftp_user(username, user_email, request_name, mail_msg)
        return "SUCCESS: FTP request successfuly completed."

    except UserEmptyException as e:
        logger.error(
            "FTP request has failed. No FTP folder was found for username \
[{0}]. User may not have proper access rights to the FTP repository.\
".format(username))
        ftp_request.status = FTPStatus.ERROR
        mail_msg = """\
An error was encountered on your data request named [{0}] for user [{1}].
No FTP folder was found for username [{1}]. Please ensure you have access
rights to the FTP repository. Otherwise, please contact the system
administrator ({2}) regarding this error.""".format(
            request_name, username, settings.FTP_SUPPORT_MAIL)
        mail_ftp_user(username, user_email, request_name, mail_msg)
        return "ERROR: User [{0}] has no FTP folder: ".format(e.message)
    except Exception as e:
        error_trace = traceback.format_exc()
        logger.error("""An FTP request has failed with an unexpected error:
{0}""".format(error_trace))
        ftp_request.status = FTPStatus.ERROR
        mail_msg = """\
An unexpected error was encountered on your data request named [{0}] for user \
[{1}].
Please forward this mail to the system administrator ({2}).

---RESULT TRACE---

{3}""".format(request_name, username, settings.FTP_SUPPORT_MAIL, error_trace,)
        mail_ftp_user(username, user_email, request_name, mail_msg)
        return "ERROR: Unexpected error occurred:\n[{0}]".format(e.message)
    finally:
        # Persist whatever status was reached, success or failure
        ftp_request.save()
def virtualenv(path): with cd(path): with prefix('source bin/activate'): yield
def get_chef_sha(): with cd(CHEF_DIR): output = sudo("git rev-parse HEAD") return output.strip()
def copy_file(config_path, file_name): remote_file = "~/fabTestData/%s"%file_name if utils.check_remote_file_exist(remote_file) == "false": put("%s%s"%(config_path,file_name), "~/fabTestData/") with cd("~/fabTestData"): run("tar zxvfm %s"%file_name)
def wxtest(): with cd('/home/hxp'): with prefix(env.activate): run('python wx/wx.py runserver')
def _run_local_integ_tests(): """ Execute integ tests that run on magma access gateway """ with cd(AGW_INTEG_ROOT): run('make local_integ_test')
def _python_coverage(): with cd(AGW_PYTHON_ROOT): run('make coverage')
def test(): with cd('/home/hxp'): with prefix(env.activate): run('gunicorn manage:app -b 127.0.0.1:8000')
def _oai_coverage(): """ Get the code coverage statistic for OAI """ with cd(AGW_ROOT): run('make coverage_oai')
def _start_gateway(): """ Starts the gateway """ with cd(AGW_ROOT): run('make run')
def up1213_all(start, count): with cd('~/checkouts/haoku.net'): for x in range(start, start + count): up1213_cmd(x)
def _run_unit_tests(): """ Run the magma unit tests """ with cd(AGW_ROOT): # Run the unit tests run('make test')
def create_app_dir(): with cd(config['apps_path']): run('git clone {repo}'.format(**config))
def _build_magma(): """ Builds magma """ with cd(AGW_ROOT): run('make')
def migrate_refresh(): with cd(WEB_PATH): run('php artisan migrate:refresh --force')
def setup_webapp(): check_app() with cd('/etc/nginx/sites-enabled'): sudo('ln -s {path}/nginx.vhost urlsh.vhost'.format(**config)) with cd('/etc/supervisor/conf.d'): sudo('ln -s {path}/uwsgi.conf urlsh.conf'.format(**config))
def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git fetch') run('git reset --hard origin/master')
def update(): with cd('/home'): require.git.working_copy( '[email protected]:yeeyimedia/yeeyi_app_interface.git', path='app', branch='master')
def setup_env():
    """
    Set up the directory structure at env.host_site_path

    Creates the changesets/files/logs/private layout, clones the repo,
    writes an Apache virtual-host file from a template, links it into
    sites-available/sites-enabled and reloads Apache.
    """
    from fabric.api import sudo

    print('+ Creating directory structure')
    if files.exists(env.host_site_path):
        if console.confirm('Remove existing directory %s' % env.host_site_path):
            with hide('running', 'stdout'):
                run('rm -rf %s' % env.host_site_path)
        else:
            print('+ Directory not removed and recreated')
            return
    with hide('running', 'stdout'):
        run('mkdir -p %s' % env.host_site_path)
    with cd(env.host_site_path):
        with hide('running', 'stdout'):
            run('mkdir changesets files logs private')
            run('touch logs/access.log logs/error.log')
            print('+ Cloning repository: %s' % env.repo_url)
            run('%s clone %s private/repo' % (env.repo_type, env.repo_url))
        if 'url' not in env:
            env.url = prompt(
                'Please enter the site url (ex: qa4.dev.ombuweb.com): ')
        virtual_host = 'private/%s' % env.url
        if files.exists(virtual_host):
            run('rm %s' % virtual_host)
        # %%placeholder%% tokens below are substituted by files.sed() calls
        virtual_host_contents = """<VirtualHost *:80>
    # Admin email, Server Name (domain name) and any aliases
    ServerAdmin [email protected]
    ServerName %%url%%

    # Index file and Document Root (where the public files are located)
    DirectoryIndex index.php
    DocumentRoot %%host_site_path%%/current

    # Custom log file locations
    ErrorLog  %%host_site_path%%/logs/error.log
    CustomLog %%host_site_path%%/logs/access.log combined

    <Directory />
        SetEnv APPLICATION_ENV %%host_type%%
        AllowOverride All
        AuthType Basic
        AuthName "Protected"
        AuthUserFile /vol/main/htpwd
        Require user dev1
        Order deny,allow
        Deny from all
        Allow from 75.145.65.101
        Satisfy any
    </Directory>
</VirtualHost>"""
        files.append(virtual_host, virtual_host_contents)
        files.sed(virtual_host, '%%host_site_path%%', env.host_site_path)
        files.sed(virtual_host, '%%host_type%%', env.host_type)
        files.sed(virtual_host, '%%url%%', env.url)
        run('rm %s.bak' % virtual_host)
        sudo(
            'if [ ! -L /etc/apache2/sites-available/%s ]; then '
            'ln -s %s /etc/apache2/sites-available/%s; fi'
            % (env.url, env.host_site_path + '/' + virtual_host, env.url))
        # BUGFIX: the test previously read "...sites-enabled/%(url)s]" with no
        # space before "]", which is a shell syntax error ("missing ]") and
        # meant the sites-enabled link was never created.
        sudo(
            'if [ ! -L /etc/apache2/sites-enabled/%(url)s ]; then '
            'ln -s ../sites-available/%(url)s '
            '/etc/apache2/sites-enabled/%(url)s; fi' % env)
        sudo('service apache2 force-reload')
    print('+ Site directory structure created at: %s' % env.host_site_path)
def virtualenv(): '''Activates virtualenv context for other commands to run inside it. ''' with cd(HERE): with prefix('source %(virtualenv_dir)s/bin/activate' % env): yield
def loaddata(): set_env_for_user('vagrant') with cd(env.code_dir): with _virtualenv(): _manage_py('loaddata apps/survey/fixtures/surveys.json.gz')
def pull(): "Pull new code" with cd(env.code_dir): run('git fetch') run('git reset --hard origin/master')
def backup(): with cd(env.backup_path): run("pg_dump -U %(db_username)s %(db_name)s > %(db_name)s_backup_$(date +%%F-%%T).sql" % env) run("ls -lt")
def migrate_db(): with cd(env.code_dir): with _virtualenv(): _manage_py('migrate --settings=config.environments.staging')
def backup_db_to_dropbox(): with cd(code_dir): run("chmod +x database-backup.sh") run("./database-backup.sh")
def runserver(): set_env_for_user('vagrant') with cd(env.code_dir): with _virtualenv(): _manage_py('runserver 0.0.0.0:8000')
def django_shell(): with cd(code_dir): run("docker-compose exec django sh")
def show_django_logs(): print("showing django logs") with cd(code_dir): run("docker-compose logs --follow django")