def install():
    """Provision the featuring project from scratch on the remote host."""
    proj_root = SETTINGS['proj']['remote-root']
    proj_venv = SETTINGS['proj']['remote-venv']
    # Start clean: wipe any previous checkout and virtualenv.
    sudo('rm -rf {}'.format(proj_root))
    sudo('rm -rf {}'.format(proj_venv))
    git.clone('https://github.com/ccortezia/featuring.git',
              path=proj_root, use_sudo=False)
    git.checkout(proj_root, branch='api-revamp')
    require.python.virtualenv(proj_venv, venv_python='/usr/bin/python3.7')
    require.python.pip()
    with python.virtualenv(proj_venv):
        python.install_requirements(os.path.join(COMPROOT, 'requirements.txt'))
    # Supervisor program definition rendered from the local template.
    upload_template(
        filename=SETTINGS['supervisor']['proj-local'],
        destination=SETTINGS['supervisor']['proj-remote'],
        context={},
        use_sudo=True)
    gunicorn_remote = SETTINGS['gunicorn']['proj-remote']
    sudo('mkdir -p {}'.format(os.path.dirname(gunicorn_remote)))
    put(SETTINGS['gunicorn']['proj-local'], gunicorn_remote, use_sudo=True)
    with python.virtualenv(proj_venv):
        require.python.package('gunicorn')
def deploy_front(now):
    """Fetch, unpack and configure the front-end bundle for deployment *now*."""
    with cd(env.deployment_dir(now)):
        run(u'wget {} -O front.zip'.format(env.fronttaxi_archive))
        run('unzip front.zip')
    front_dir = env.fronttaxi_dir(now)
    venv_path = env.apitaxi_venv_path(now)
    with cd(front_dir), python.virtualenv(venv_path):
        python.install_requirements('requirements.txt')
        # Push the shared API config alongside the front-end deployment.
        put(environ['APITAXI_CONFIG_FILE'], env.fronttaxi_config_path(now))
def requirements(filename, pip_cmd='pip', python_cmd='python', allow_external=None, allow_unverified=None, **kwargs):
    """
    Require Python packages from a pip `requirements file`_.

    Since pip 1.5, externally hosted and unverified packages are refused
    by default; pass ``allow_external=[...]`` / ``allow_unverified=[...]``
    to opt specific packages back in::

        from fabtools.python import virtualenv
        from fabtools import require

        with virtualenv('/path/to/venv'):
            require.python.requirements('requirements.txt')

    .. _requirements file: http://www.pip-installer.org/en/latest/requirements.html
    """
    # Ensure a sufficiently recent pip before delegating the install.
    pip(MIN_PIP_VERSION, python_cmd=python_cmd)
    install_requirements(filename,
                         pip_cmd=pip_cmd,
                         allow_external=allow_external,
                         allow_unverified=allow_unverified,
                         **kwargs)
def deploy():
    """ deploy xsendfile_example application """
    if not env.hosts:
        abort(USAGE)
    # Package the working tree: a stash commit captures uncommitted
    # changes; fall back to HEAD when the tree is clean (empty output).
    commit = local('git stash create', capture=True) or 'HEAD'
    local('git archive %s > %s' % (commit, PACKAGE_NAME))
    # Copy the package over and unpack it on the VM.
    put(PACKAGE_NAME, '~')
    with cd('~'):
        run('mkdir xsendfile_example', quiet=True)
        with cd('xsendfile_example'):
            run('tar -xf ~/%s' % PACKAGE_NAME)
            # Application dependencies go into the vagrant virtualenv.
            with python.virtualenv('/home/vagrant/venv'):
                python.install_requirements('requirements.txt')
            # Create a sample media file to serve.
            run('mkdir media', quiet=True)
            with cd('media'):
                run('echo "%s" | base64 -d > red_dot.png' % RED_DOT)
    sudo('service apache2 restart')
def deploy_api(commit='master'):
    """Deploy the APITaxi application at the given git *commit*."""
    now = int(time.time())
    deploy_dir = env.deployment_dir(now)
    require.files.directory(deploy_dir)
    with cd(deploy_dir):
        run(u'wget {}'.format(env.apitaxi_archive.format(commit)))
        run('unzip {}.zip'.format(commit))
        # Downstream paths expect the "master" directory name.
        if commit != 'master':
            run('mv APITaxi-{} APITaxi-master'.format(commit))
    venv_path = env.apitaxi_venv_path(now)
    config_path = env.apitaxi_config_path(now)
    with cd(env.apitaxi_dir(now)):
        require.python.virtualenv(venv_path)
        with python.virtualenv(venv_path):
            python.install_pip(use_sudo=False)
            require.python.package('uwsgi')
            python.install_requirements('requirements.txt')
        put(environ['APITAXI_CONFIG_FILE'], config_path)
        with shell_env(APITAXI_CONFIG_FILE=config_path):
            # Give supervisor up to ~30s to come up before migrating.
            for _ in range(1, 30):
                if service.is_running('supervisor'):
                    break
                time.sleep(1)
            run('python manage.py db upgrade')
            install_admin_user()
    deploy_front(now)
    deploy_nginx_api_site(now)
    if not service.is_running('nginx'):
        service.start('nginx')
    clean_directories(now)
    stop_old_processes(now)
    restart_stats_workers(now)
def requirements(filename, pip_cmd='pip', python_cmd='python', **kwargs):
    """
    Require the Python packages listed in a pip `requirements file`_.

    .. _requirements file: http://www.pip-installer.org/en/latest/requirements.html
    """
    # A modern pip must be present before installing from the file.
    pip(MIN_PIP_VERSION, python_cmd=python_cmd)
    install_requirements(filename,
                         pip_cmd=pip_cmd,
                         **kwargs)
def requirements(filename, **kwargs):
    """
    Require the Python packages listed in a pip `requirements file`_.

    .. _requirements file: http://www.pip-installer.org/en/latest/requirements.html
    """
    # Ensure the baseline pip version, then delegate the actual install.
    pip(DEFAULT_PIP_VERSION)
    install_requirements(filename, **kwargs)
def update_venv(with_extras=False):
    """Refresh the project's virtualenv from its requirements files.

    When *with_extras* is true, the extra requirements are upgraded first.
    """
    base = env.code_root
    with virtualenv(env.virtenv):
        if with_extras:
            install_requirements(base + '/extra-requirements.txt', upgrade=True)
        install_requirements(base + '/requirements.txt')
def install_venv_requirements():
    """Install every configured requirements file into the remote virtualenv."""
    print_title('Installing remote virtual env requirements')
    with virtualenv(Settings.DIR_VENV):
        for req_file in Settings.REQUIRMENTS_FILES:
            # Skip (but report) files that are absent on the remote host.
            if not files.exists(req_file):
                print_error("File missing: {}".format(req_file))
                continue
            install_requirements(req_file, use_sudo=False)
            print_success("Installed: {}".format(req_file))
def install_venv_requirements(py_version):
    """Install the configured requirements files into the venv for *py_version*."""
    # Only the environment path is needed here; other parts are unused.
    env_path, _ver_name, _env_name = get_env(py_version)
    print_title('Installing remote virtual env requirements')
    with virtualenv(env_path):
        for req_file in Settings.REQUIRMENTS_FILES:
            if not files.exists(req_file):
                print_error("File missing: {}".format(req_file))
                continue
            install_requirements(req_file, use_sudo=False)
            print_success("Installed: {}".format(req_file))
def get_webvirt():
    """ Clone WebVirtMgr and Add it to installation location """
    install_path = fsettings.INSTALL_PATH
    require.directory(install_path, use_sudo=True)
    with cd(install_path):
        require.git.working_copy(fsettings.REPO_URL, use_sudo=True)
    with cd(os.path.join(install_path, "webvirtmgr")):
        install_requirements("requirements.txt", use_sudo=True)
        sudo("python manage.py syncdb")  # --noinput and load fixtures?!
        sudo("python manage.py collectstatic --noinput")  # just say yes!
def install_indexer():
    '''
    Install Cozy Data Indexer. Use supervisord to daemonize it.

    Returns True early when the indexer already answers on port 9102;
    otherwise clones the sources, builds a dedicated virtualenv, registers
    the process with supervisor and verifies it came up.
    '''
    # Filesystem layout: sources and their private virtualenv under `home`.
    home = '/usr/local/cozy-indexer'
    indexer_dir = '%s/cozy-data-indexer' % home
    indexer_env_dir = '%s/virtualenv' % indexer_dir
    python_exe = indexer_dir + '/virtualenv/bin/python'
    indexer_exe = 'server.py'
    process_name = 'cozy-indexer'

    # Check if indexer is already installed, .
    # warn_only: a connection-refused curl must not abort the task.
    with settings(warn_only=True):
        result = run('curl -X GET http://127.0.0.1:9102/')
    # str.find returns -1 when the marker string is absent from the response.
    is_installed = result.find("Cozy Data Indexer")
    if is_installed != -1:
        print(green("Data Indexer already installed"))
        return True

    require.files.directory(home, use_sudo=True)
    with cd(home):
        # Replace any stale checkout with a fresh clone.
        if files.exists('cozy-data-indexer'):
            su_delete('cozy-data-indexer')
        sudo('git clone https://github.com/cozy/cozy-data-indexer.git')
    require.python.virtualenv(indexer_env_dir, use_sudo=True)
    with python.virtualenv(indexer_env_dir):
        python.install_requirements(
            indexer_dir + '/requirements/common.txt',
            use_sudo=True)
    # Hand ownership of the whole tree to the service account.
    sudo('chown -R cozy:cozy %s' % home)
    require.supervisor.process(
        process_name,
        command='%s %s' % (python_exe, indexer_exe),
        directory=indexer_dir,
        user='******'
    )
    supervisor.restart_process(process_name)
    # Give the service time to boot, then probe it again to confirm.
    time.sleep(10)
    result = run('curl -X GET http://127.0.0.1:9102/')
    is_installed = result.find("Cozy Data Indexer")
    if is_installed == -1:
        print_failed("cozy-data-indexer")
    print(green("Data Indexer successfully started"))
def update_indexer():
    """Pull the latest Cozy indexer sources, reinstall deps, restart service."""
    base = '/usr/local/cozy-indexer'
    src_dir = base + '/cozy-data-indexer'
    venv_dir = src_dir + '/virtualenv'
    with cd(src_dir):
        sudo('git pull origin master')
    with python.virtualenv(venv_dir):
        python.install_requirements(src_dir + '/requirements/common.txt',
                                    upgrade=True, use_sudo=True)
    supervisor.restart_process('cozy-indexer')
def deploy():
    # Deploy the rpitempcontroller app into a remote virtualenv and install
    # its config file and init script.
    # NOTE(review): `mode=0754` below is a Python 2 octal literal; this
    # function is a syntax error under Python 3 (would need 0o754).
    require("install_location")
    sudo("mkdir -p %(install_location)s" % env)
    # Bootstrap pip if the host doesn't have it yet.
    if not is_pip_installed():
        install_pip()
    install("virtualenv", use_sudo=True)
    put(join_local("pip-requirements.txt"), "/tmp/pip-requirements.txt")
    with _virtualenv():
        install_requirements("/tmp/pip-requirements.txt", use_sudo=True)
        # Editable install straight from GitHub into the virtualenv.
        sudo("pip install -e git+https://github.com/oliverdrake/rpitempcontroller.git#egg=tempcontrol")
    put("tempcontroller-config.default", env.config_file, use_sudo=True,
        mirror_local_mode=True)
    # python_bin_dir is consumed by the init-script Jinja template (context=env).
    with settings(python_bin_dir=os.path.join(_virtualenv_location(), "bin")):
        upload_template("init-script.in", env.init_script, mode=0754,
                        use_jinja=True, context=env, use_sudo=True,
                        backup=False)
    sudo("chown root:root %(init_script)s" % env)
def deploy_api():
    """Deploy the APITaxi master branch into a fresh timestamped directory."""
    now = int(time.time())
    deploy_dir = env.deployment_dir(now)
    require.files.directory(deploy_dir)
    with cd(deploy_dir):
        run('wget https://github.com/openmaraude/APITaxi/archive/master.zip')
        run('unzip master.zip')
    venv_path = env.apitaxi_venv_path(now)
    with cd(env.apitaxi_dir(now)):
        require.python.virtualenv(venv_path)
        with python.virtualenv(venv_path):
            python.install_pip()
            require.python.package('uwsgi')
            python.install_requirements('requirements.txt')
        put(environ['APITAXI_CONFIG_FILE'], env.apitaxi_config_path(now))
        run('python manage.py db upgrade')
    deploy_nginx_api_site(now)
    if not service.is_running('nginx'):
        service.start('nginx')
    clean_directories(now)
    stop_old_processes(now)
def install_dependencies():
    """Install everything listed in the app's requirements.txt (as root)."""
    app_dir = env.app_dir
    with cd(app_dir):
        install_requirements('requirements.txt', use_sudo=True)
def provision():
    """Provision the savings_champion stack end to end.

    Installs system packages, Postgres, the project virtualenv, uwsgi,
    celery workers, flower and nginx, then wires everything up under
    supervisor.  Python 2 code (uses ``xrange``).
    """
    project_path = '/srv/www/savings_champion/savings_champion'
    virtualenv_path = '/home/savingschampion/.virtualenv/savings_champion'
    require.files.directory('/srv/www/', use_sudo=True,
                            owner='savingschampion', group='savingschampion')
    deb.update_index()
    require.postgres.server()
    require.deb.packages([
        'libpq-dev', 'memcached', 'libtiff4-dev', 'libjpeg8-dev',
        'zlib1g-dev', 'libfreetype6-dev', 'liblcms1-dev', 'libwebp-dev'
    ])
    require.postgres.user('savings_champion', 'r8Y@Mge#aTtW', createdb=True)
    require.postgres.database('savings_champion', 'savings_champion')
    require.python.virtualenv(virtualenv_path)
    require.python.packages(['uwsgi', 'flower'], use_sudo=True)
    require.files.directory('/var/log/uwsgi/', use_sudo=True)
    require.files.directory('/var/log/celeryd/', use_sudo=True)
    require.files.directory('/var/log/flower/', use_sudo=True)
    require.nodejs.installed_from_source()
    require.nodejs.package('yuglify')
    require.postfix.server('5.9.87.167')
    with virtualenv(virtualenv_path):
        python.install_requirements("/".join(
            [project_path, 'requirements.txt']), upgrade=True, quiet=False)
        with cd('%s' % project_path):
            # First deploy only: point settings.py at production settings.
            if not exists('settings.py'):
                run('ln -s settings_production.py settings.py')
            run('python manage.py collectstatic --noinput')
            run('python manage.py syncdb --noinput')
            run('python manage.py migrate')
    require.deb.package('rabbitmq-server')
    sudo('rabbitmqctl add_vhost guest')
    sudo('rabbitmqctl set_permissions -p guest guest ".*" ".*" ".*"')
    uwsgi_path = '/tmp/uwsgi.sock'
    with cd('%s' % project_path):
        require.supervisor.process(
            'uwsgi',
            command=
            "uwsgi --socket %s --master --workers 4 --home %s --chdir %s --file django.wsgi --harakiri=60 --reaper"
            % (uwsgi_path, virtualenv_path, project_path),
            directory=project_path,
            user='******',
            stdout_logfile='/var/log/uwsgi/savingschampion.log',
            stopsignal='QUIT')
        supervisor.restart_process('uwsgi')
        require.file('celerybeat-schedule.db', use_sudo=True,
                     owner='www-data', group='www-data')
        # BUGFIX: xrange() takes only positional arguments; the original
        # `xrange(1, stop=3)` raised TypeError before any worker was set up.
        for celery_pool in xrange(1, 3):
            require.supervisor.process(
                'celery-%s' % celery_pool,
                command=
                "%s/bin/celery worker -E -A celeryapp --concurrency=10 -n worker%s.localhost"
                % (virtualenv_path, celery_pool),
                directory=project_path,
                user='******',
                stdout_logfile='/var/log/celeryd/savingschampion.log',
                redirect_stderr=True,
                environment='DJANGO_SETTINGS_MODULE=settings')
        # NOTE(review): restarts a process named 'celery', but the processes
        # registered above are 'celery-1'/'celery-2' — verify intended name.
        supervisor.restart_process('celery')
    require.nginx.server()
    require.nginx.disable('default')
    require.nginx.site('savings_champion',
                       template_contents=NGINX_NOSSL_CONFIG,
                       enabled=True, check_config=True, port=80)
    require.nginx.site('savings_champion_ssl',
                       template_contents=NGINX_SSL_PRODUCTION_CONFIG,
                       enabled=True, check_config=True, port=443,
                       server_alias='savingschampion.co.uk',
                       doc_root=project_path,
                       proxy_url=uwsgi_path,
                       static_path='/'.join([project_path, '..', 'static']),
                       media_path='/'.join([project_path, 'assets']),
                       ssl_certificate='www_savingschampion_co_uk.cert',
                       ssl_key='www_savingschampion_co_uk.key')
    require.supervisor.process(
        'flower',
        command=
        "flower --broker=amqp://guest@localhost:5672// --address=0.0.0.0 --basic_auth=savingschampion:Over9000!",
        stdout_logfile='/var/log/flower/savingschampion.log',
        redirect_stderr=True,
        user="******")
def _virtualenv():
    """Upgrade-install the project's requirements inside the virtualenv."""
    requirements_path = '/'.join([PROJECT_PATH, 'requirements.txt'])
    with virtualenv(VIRTUALENV_PATH):
        python.install_requirements(requirements_path,
                                    upgrade=True, quiet=False)
def _virtualenv():
    """Install (with upgrade) the project requirements into the virtualenv."""
    with virtualenv(VIRTUALENV_PATH):
        reqs = PROJECT_PATH + '/requirements.txt'
        python.install_requirements(reqs, upgrade=True, quiet=False)
def provision():
    """Provision the savings_champion stack end to end.

    Installs system packages, Postgres, the project virtualenv, uwsgi,
    celery workers, flower and nginx, then wires everything up under
    supervisor.  Python 2 code (uses ``xrange``).
    """
    project_path = '/srv/www/savings_champion/savings_champion'
    virtualenv_path = '/home/savingschampion/.virtualenv/savings_champion'
    require.files.directory('/srv/www/', use_sudo=True,
                            owner='savingschampion', group='savingschampion')
    deb.update_index()
    require.postgres.server()
    require.deb.packages(['libpq-dev', 'memcached', 'libtiff4-dev',
                          'libjpeg8-dev', 'zlib1g-dev', 'libfreetype6-dev',
                          'liblcms1-dev', 'libwebp-dev'])
    require.postgres.user('savings_champion', 'r8Y@Mge#aTtW', createdb=True)
    require.postgres.database('savings_champion', 'savings_champion')
    require.python.virtualenv(virtualenv_path)
    require.python.packages(['uwsgi', 'flower'], use_sudo=True)
    require.files.directory('/var/log/uwsgi/', use_sudo=True)
    require.files.directory('/var/log/celeryd/', use_sudo=True)
    require.files.directory('/var/log/flower/', use_sudo=True)
    require.nodejs.installed_from_source()
    require.nodejs.package('yuglify')
    require.postfix.server('5.9.87.167')
    with virtualenv(virtualenv_path):
        python.install_requirements("/".join([project_path, 'requirements.txt']),
                                    upgrade=True, quiet=False)
        with cd('%s' % project_path):
            # First deploy only: point settings.py at production settings.
            if not exists('settings.py'):
                run('ln -s settings_production.py settings.py')
            run('python manage.py collectstatic --noinput')
            run('python manage.py syncdb --noinput')
            run('python manage.py migrate')
    require.deb.package('rabbitmq-server')
    sudo('rabbitmqctl add_vhost guest')
    sudo('rabbitmqctl set_permissions -p guest guest ".*" ".*" ".*"')
    uwsgi_path = '/tmp/uwsgi.sock'
    with cd('%s' % project_path):
        require.supervisor.process(
            'uwsgi',
            command="uwsgi --socket %s --master --workers 4 --home %s --chdir %s --file django.wsgi --harakiri=60 --reaper" % (
                uwsgi_path, virtualenv_path, project_path),
            directory=project_path,
            user='******',
            stdout_logfile='/var/log/uwsgi/savingschampion.log',
            stopsignal='QUIT')
        supervisor.restart_process('uwsgi')
        require.file('celerybeat-schedule.db', use_sudo=True,
                     owner='www-data', group='www-data')
        # BUGFIX: xrange() accepts only positional arguments; the original
        # `xrange(1, stop=3)` raised TypeError before any worker was set up.
        for celery_pool in xrange(1, 3):
            require.supervisor.process(
                'celery-%s' % celery_pool,
                command="%s/bin/celery worker -E -A celeryapp --concurrency=10 -n worker%s.localhost" % (
                    virtualenv_path, celery_pool),
                directory=project_path,
                user='******',
                stdout_logfile='/var/log/celeryd/savingschampion.log',
                redirect_stderr=True,
                environment='DJANGO_SETTINGS_MODULE=settings')
        # NOTE(review): restarts a process named 'celery', but the processes
        # registered above are 'celery-1'/'celery-2' — verify intended name.
        supervisor.restart_process('celery')
    require.nginx.server()
    require.nginx.disable('default')
    require.nginx.site('savings_champion',
                       template_contents=NGINX_NOSSL_CONFIG,
                       enabled=True, check_config=True, port=80)
    require.nginx.site('savings_champion_ssl',
                       template_contents=NGINX_SSL_PRODUCTION_CONFIG,
                       enabled=True, check_config=True, port=443,
                       server_alias='savingschampion.co.uk',
                       doc_root=project_path,
                       proxy_url=uwsgi_path,
                       static_path='/'.join([project_path, '..', 'static']),
                       media_path='/'.join([project_path, 'assets']),
                       ssl_certificate='www_savingschampion_co_uk.cert',
                       ssl_key='www_savingschampion_co_uk.key')
    require.supervisor.process(
        'flower',
        command="flower --broker=amqp://guest@localhost:5672// --address=0.0.0.0 --basic_auth=savingschampion:Over9000!",
        stdout_logfile='/var/log/flower/savingschampion.log',
        redirect_stderr=True,
        user="******")
def install_venv_requirements():
    """Install the remote requirements file into the configured virtualenv."""
    print_title('Installing remote virtual env requirements')
    remote_reqs = '{0}requirements/remote.txt'.format(settings.DIR_CODE)
    with virtualenv(settings.DIR_VENV):
        install_requirements(remote_reqs, use_sudo=False)