def _require_configured_openerp_log():
    """Route OpenERP logging through rsyslog and set up log rotation.

    Installs an rsyslog drop-in so OpenERP output is written to
    OPENERP_NORMAL_LOG_PATH / OPENERP_ERROR_LOG_PATH, restarts rsyslog,
    then installs a logrotate config for the rotated (_ALL) log paths.
    """
    ## Managing openerp logs by syslog
    require.directory(OPENERP_LOG_FOLDER, use_sudo=True)
    params = {
        'OPENERP_NORMAL_LOG_PATH' : OPENERP_NORMAL_LOG_PATH,
        'OPENERP_ERROR_LOG_PATH' : OPENERP_ERROR_LOG_PATH,
    }
    require.files.template_file(
        path = '/etc/rsyslog.d/20-openerp.conf',
        template_source = 'files/etc/rsyslog.d/20-openerp.conf',
        context = params,
        owner=ADMIN_USER,
        group=ADMIN_GROUP,
        mode='644',
        use_sudo = True,
    )
    # rsyslog must be restarted so the new drop-in takes effect.
    require.service.restarted('rsyslog')
    ## Rotation of OpenERP's log
    params = {
        'OPENERP_NORMAL_LOG_PATH_ALL' : OPENERP_NORMAL_LOG_PATH_ALL,
        'OPENERP_ERROR_LOG_PATH_ALL' : OPENERP_ERROR_LOG_PATH_ALL,
    }
    require.files.template_file(
        path = '/etc/logrotate.d/openerp.conf',
        template_source = 'files/etc/logrotate.d/openerp.conf',
        context = params,
        owner=ADMIN_USER,
        group=ADMIN_GROUP,
        mode='644',
        use_sudo = True,
    )
def install_node10():
    '''
    install node 0.10.26
    '''
    if is_arm():
        # On ARM there is no distro package: fetch the prebuilt Pi tarball.
        with settings(warn_only=True):
            result = run('node -v')
        # str.find returns -1 when the version string is absent.
        is_installed = result.find('v0.10.26')
        if is_installed != -1:
            print(green("Node.js is already installed"))
            return True
        version = '0.10.26'
        node_url = 'http://nodejs.org/dist/v{0}/node-v{0}-linux-arm-pi.tar.gz'
        require.file(url=node_url.format(version))
        run('tar -xzvf node-v%s-linux-arm-pi.tar.gz' % version)
        # Start from a clean /opt/node before moving the new tree in.
        delete_if_exists('/opt/node')
        require.directory('/opt/node', use_sudo=True, owner='root')
        sudo('mv node-v%s-linux-arm-pi/* /opt/node' % version)
        # Symlink both node and npm into the common binary paths.
        sudo('ln -s /opt/node/bin/node /usr/local/bin/node')
        sudo('ln -s /opt/node/bin/node /usr/bin/node')
        sudo('ln -s /opt/node/bin/npm /usr/local/bin/npm')
        sudo('ln -s /opt/node/bin/npm /usr/bin/npm')
    else:
        # Non-ARM hosts build Node.js from source via fabtools.
        require.nodejs.installed_from_source('0.10.26')
    print(green('node 0.10.26 successfully installed'))
def install_nodejs():
    """
    Test low level API

    Exercises fabtools.nodejs: install from source, global and local npm
    package install/uninstall, and dependency installation from a
    package.json file.
    """
    from fabtools import nodejs
    from fabtools import require
    from fabtools.files import is_file

    # Upload local copy of source archive to speed up tests
    local_cache = '~/.vagrant.d/cache/fabtools/node-v%s.tar.gz' % nodejs.DEFAULT_VERSION
    if os.path.exists(local_cache):
        put(local_cache)

    # Install Node.js from source
    if nodejs.version() != nodejs.DEFAULT_VERSION:
        nodejs.install_from_source()
    assert is_file('/usr/local/bin/node')
    assert nodejs.version() == nodejs.DEFAULT_VERSION

    # Install / uninstall global package
    if not nodejs.package_version('underscore'):
        nodejs.install_package('underscore', version='1.4.2')
    assert nodejs.package_version('underscore') == '1.4.2'
    assert is_file('/usr/local/lib/node_modules/underscore/underscore.js')
    nodejs.uninstall_package('underscore')
    # Fixed: identity comparison with None (was `== None`).
    assert nodejs.package_version('underscore') is None
    assert not is_file('/usr/local/lib/node_modules/underscore/underscore.js')

    # Install / uninstall local package
    if not nodejs.package_version('underscore', local=True):
        nodejs.install_package('underscore', version='1.4.2', local=True)
    assert is_file('node_modules/underscore/underscore.js')
    assert nodejs.package_version('underscore', local=True) == '1.4.2'
    nodejs.uninstall_package('underscore', local=True)
    assert nodejs.package_version('underscore', local=True) is None
    assert not is_file('node_modules/underscore/underscore.js')

    # Install dependencies from package.json file
    require.directory('nodetest')
    with cd('nodetest'):
        require.file('package.json', contents=json.dumps({
            'name': 'nodetest',
            'version': '1.0.0',
            'dependencies': {
                'underscore': '1.4.2'
            }
        }))
        nodejs.install_dependencies()
        assert is_file('node_modules/underscore/underscore.js')
        assert nodejs.package_version('underscore', local=True) == '1.4.2'
def install_controller(): ''' Install Cozy Controller Application Manager. Daemonize with supervisor. ''' # Check if controller is already installed, . with settings(warn_only=True): result = run('curl -X GET http://127.0.0.1:9002/drones/running') is_installed = result.find('Application is not authenticated') if is_installed != -1: print(green("Cozy Controller already installed")) return True sudo('npm install -g cozy-controller') require.directory('/etc/cozy', owner='root', use_sudo=True) require.supervisor.process( 'cozy-controller', command='cozy-controller', environment='NODE_ENV="production",BIND_IP_PROXY="127.0.0.1"', user='******') print('Waiting for cozy-controller to be launched...') program = 'curl -X GET http://127.0.0.1:9002/drones/running' def comparator(result): return result == 'Application is not authenticated' # Run curl until we get the MATCH_STR or a timeout if not try_delayed_run(program, comparator): print_failed('cozy-controller') print(green('Cozy Controller successfully started'))
def livebuild(proxy):
    """Prepare a Debian live-build host that fetches packages via *proxy*.

    ``proxy`` is a host[:port] string used for the APT/http proxy
    configuration written to /etc/http_proxy and /etc/profile.d/proxy.sh.
    """
    # NOTE: Python 2 print statement.
    print green("Livebuild with " + proxy)
    pkg('openssh-server live-build python git-core zsh', 'debootstrap')
    require.directory('/var/build/')
    # Build scratch space on tmpfs for speed.
    mount_line('tmpfs /var/tmp tmpfs defaults 0 0')
    run_as_root('cat /etc/fstab')
    require.file('/etc/http_proxy', contents="http://%s/" % proxy, owner='root', group='root', mode='644', verify_remote=True, use_sudo=True)
    require.file('/etc/profile.d/proxy.sh', contents='export http_proxy="http://%s/"' % proxy, owner='root', group='root', mode='644', verify_remote=True, use_sudo=True)
    # Helper build scripts and the daily repo-cache cron job.
    require.file('/usr/local/bin/manualbuild.sh', source='files/bin/manualbuild.sh', owner='root', group='root', mode='755', verify_remote=True, use_sudo=True)
    require.file('/usr/local/bin/gitbuild.sh', source='files/bin/gitbuild.sh', owner='root', group='root', mode='755', verify_remote=True, use_sudo=True)
    require.file('/etc/cron.daily/cacherepo', source='files/bin/cron_cacherepo', owner='root', group='root', mode='755', verify_remote=True, use_sudo=True)
def enable_app_server(*args, **kwargs):
    # require directory env.scow.project_var_dir exists, owner www-data
    proc_name = env.scow.project_tagged
    # Resolve paths inside the project's virtualenvwrapper environment.
    with prefix('workon ' + env.scow.project_tagged):
        uwsgi_bin = run('echo $VIRTUAL_ENV/bin/uwsgi')
        uwsgi_logfile = run(
            'echo `cat $VIRTUAL_ENV/$VIRTUALENVWRAPPER_PROJECT_FILENAME`/var/log/uwsgi.log'
        )
    web_user = '******'
    # Build the uwsgi command line; split()/join collapses the whitespace.
    uwsgi_cmd = ' '.join('''
        {uwsgi_bin}
        --socket {socket_path}
        --module {wsgi_app_module}
        --uid {web_user}
        --master
        --logto {uwsgi_logfile}
    '''.format(
        #process_name=env.scow.project_tagged,
        uwsgi_bin=uwsgi_bin,
        socket_path=path.join(env.scow.project_var_dir, 'uwsgi.sock'),
        wsgi_app_module=env.project.WSGI_APP_MODULE,
        web_user=web_user,
        uwsgi_logfile=uwsgi_logfile,
    ).split())
    require.directory(env.scow.project_var_dir, owner=web_user)
    # Register and start the uwsgi process under supervisor.
    require.supervisor.process(
        proc_name,
        command=uwsgi_cmd,
        user=web_user,
    )
    supervisor.start_process(proc_name)
def install_node10():
    '''
    install node 0.10.26

    On a Raspberry Pi, download and unpack the prebuilt ARM tarball into
    /opt/node and symlink the binaries; elsewhere, build from source.
    '''
    if is_pi():
        with settings(warn_only=True):
            result = run('node -v')
        # str.find returns -1 when the version string is absent.
        is_installed = result.find('v0.10.26')
        if is_installed != -1:
            print(green("Node.js is already installed"))
            return True
        version = '0.10.26'
        node_url = 'http://nodejs.org/dist/v{0}/node-v{0}-linux-arm-pi.tar.gz'
        require.file(url=node_url.format(version))
        run('tar -xzvf node-v%s-linux-arm-pi.tar.gz' % version)
        delete_if_exists('/opt/node')
        # Fixed: creating a root-owned directory requires sudo (matches the
        # ARM variant of this task elsewhere in the codebase).
        require.directory('/opt/node', use_sudo=True, owner='root')
        sudo('mv node-v%s-linux-arm-pi/* /opt/node' % version)
        # Symlink both node and npm into the common binary paths.
        sudo('ln -s /opt/node/bin/node /usr/local/bin/node')
        sudo('ln -s /opt/node/bin/node /usr/bin/node')
        sudo('ln -s /opt/node/bin/npm /usr/local/bin/npm')
        sudo('ln -s /opt/node/bin/npm /usr/bin/npm')
    else:
        require.nodejs.installed_from_source('0.10.26')
    print(green('node 0.10.26 successfully installed'))
def config_webserver():
    """Deploy the Django app: collect static files, migrate, and wire up
    supervisor + nginx for the 'jianguo' site."""
    banner("config web server")
    with virtualenv():
        with cd(os.path.join(env.path, env.depot_name)):
            require.python.requirements('requirements.txt')
            with hide('output'):
                # NOTE: Python 2 print statement.
                print green('Collect static files')
                run("python manage.py collectstatic --noinput")
            # Publish collected static files to the web root.
            require.directory('/var/static/jianguo', use_sudo=True)
            sudo('cp -r publish/static/* /var/static/jianguo/')
            sudo('rm -r publish')
            # Copy the application tree into the WSGI directory.
            require.directory('/var/wsgi/jianguo', use_sudo=True, group='www-data', owner='www-data')
            sudo('cp -r . /var/wsgi/jianguo')
    with cd('/var/wsgi/jianguo'):
        run("python manage.py migrate")
        print green('Copy supervisor conf')
        sudo('cp supervisord.conf /etc/supervisor/conf.d/jianguo.conf')
        # Full stop/start rather than restart so new conf is picked up.
        sudo('/etc/init.d/supervisor stop')
        sudo('/etc/init.d/supervisor start')
    put('nginx.conf', '/etc/nginx/sites-available/jianguo.conf', use_sudo=True)
    with settings(warn_only=True):
        sudo('rm /etc/nginx/sites-enabled/default')
    sudo('nginx_ensite jianguo.conf')
    sudo('service nginx reload')
def desktop_basic():
    """Provision a basic desktop host: headless base plus GUI tooling."""
    # Reuse the headless baseline first.
    execute(headless)
    require_deb_packages('''
        baobab rdesktop x11vnc xtightvncviewer ssvnc zim synaptic gparted
        remmina scrot shutter avahi-discover avahi-ui-utils avahi-utils
        avahi-dnsconfd xdotool
    ''' )
    # Directory for screenshots taken by scrot/shutter.
    require.directory('shots')
def installed_from_source(version=VERSION):
    """
    Require Redis to be installed from source

    Downloads the tarball, compiles it, and copies the binaries listed in
    BINARIES into /opt/redis-<version>/, owned by the ``redis`` user.
    Idempotent: skips the build if redis-server already exists there.
    """
    from fabtools import require

    # NOTE(review): another variant of this task passes home='/var/lib/redis'
    # here — confirm which user layout is intended.
    require.user('redis')

    dest_dir = '/opt/redis-%(version)s' % locals()
    require.directory(dest_dir, use_sudo=True, owner='redis')

    if not is_file('%(dest_dir)s/redis-server' % locals()):

        with cd('/tmp'):

            # Download and unpack the tarball
            tarball = 'redis-%(version)s.tar.gz' % locals()
            require.file(tarball, url='http://redis.googlecode.com/files/' + tarball)
            run('tar xzf %(tarball)s' % locals())

            # Compile and install binaries
            require.deb.package('build-essential')
            with cd('redis-%(version)s' % locals()):
                run('make')
                for filename in BINARIES:
                    sudo('cp -pf src/%(filename)s %(dest_dir)s/' % locals())
                    sudo('chown redis: %(dest_dir)s/%(filename)s' % locals())
def headless():
    """Provision a headless server baseline: multiverse repo, timezone,
    NTP, common CLI tools, and a shell profile."""
    # How do I enable the "multiverse" repository?
    # http://askubuntu.com/questions/89096/how-do-i-enable-the-multiverse-repository
    sudo('sed -i "/^# deb.*multiverse/ s/^# //" /etc/apt/sources.list')
    timezone()
    execute(ntpd)
    require_deb_packages(
        ''' sudo screen htop nmap iotop mc ssh nano cpuid curl wget
        nfs-kernel-server nfs-common ''')
    require.directory('bin')
    # Install the managed shell profile from the local config store.
    require.file('.profile', contents=cfg('profile.conf'))
def setup_environment(): """Setup users, groups, supervisor, etc.""" # FIXME: When `fabtools v0.21.0` gets released, remove this... with shell_env(SYSTEMD_PAGER=''): require.users.user( name=env.app_user, group=env.app_user, system=True, shell='/bin/bash', ) for path in (env.app_path, env.etc_path): require.directory( path=path, owner=env.app_user, group=env.app_user, use_sudo=True, ) require.python.virtualenv( directory=env.venv_path, venv_python='python3', user=env.app_user, use_sudo=True, ) require.supervisor.process( name=env.app_name, command='{} stream --verbose'.format(env.hadroid_botctl), user=env.app_user, directory=env.app_path, stdout_logfile='/var/log/hadroid.log', stderr_logfile='/var/log/hadroid-err.log', environment='HADROID_CONFIG={}'.format(env.hadroid_config), )
def install_controller(): ''' Install Cozy Controller Application Manager. Daemonize with supervisor. ''' # Check if controller is already installed, . with settings(warn_only=True): result = run('curl -X GET http://127.0.0.1:9002/drones/running') is_installed = result.find('Application is not authenticated') if is_installed != -1: print(green("Cozy Controller already installed")) return True sudo('npm install -g cozy-controller') require.directory('/etc/cozy', owner='root', use_sudo=True) require.supervisor.process( 'cozy-controller', command='cozy-controller', environment='NODE_ENV="production",BIND_IP_PROXY="127.0.0.1"', user='******' ) print('Waiting for cozy-controller to be launched...') program = 'curl -X GET http://127.0.0.1:9002/drones/running' def comparator(result): return result == 'Application is not authenticated' # Run curl until we get the MATCH_STR or a timeout if not try_delayed_run(program, comparator): print_failed('cozy-controller') print(green('Cozy Controller successfully started'))
def cache(app):
    """require an app to be put behind varnish

    - apt-get install varnish
    - create /etc/default/varnish
    - create /etc/varnish/main.vcl
    - create /etc/varnish/sites/
    - create /etc/varnish/sites/{app.name}.vcl
    - create /etc/varnish/sites.vcl (and require it to contain the correct include!)
    - /etc/init.d/varnish restart
    - adapt nginx site config
    - /etc/init.d/nginx reload
    """
    require.deb.package('varnish')
    # Daemon defaults and the shared entry-point VCL.
    deployment.sudo_upload_template('varnish', dest='/etc/default/varnish')
    deployment.sudo_upload_template('varnish_main.vcl', dest='/etc/varnish/main.vcl')
    # Per-site VCL under /etc/varnish/sites/.
    require.directory(str(app.varnish_site.parent), use_sudo=True)
    deployment.sudo_upload_template('varnish_site.vcl',
                                    dest=str(app.varnish_site),
                                    app_name=app.name,
                                    app_port=app.port,
                                    app_domain=app.domain)
    # Regenerate the include list, then point nginx at varnish.
    _update_varnish_sites(app.varnish_site.parent)
    _update_nginx(app, with_varnish=True)
def build_box(requirements, settings):
    """
    Provision a throwaway Vagrant build box and yield its environment.

    Args:
        requirements (Requirements): package names to install on the box.
        settings (Settings): repo/branch/ssh configuration for the build.

    Yields:
        BuildBoxEnv:

    The box is destroyed both before provisioning and after the caller
    finishes with the yielded environment.
    """
    local('vagrant destroy -f')
    local('vagrant up')
    with vagrant_settings():
        # dh-virtualenv PPA supplies the packaging toolchain.
        require.deb.ppa('ppa:spotify-jyrki/dh-virtualenv', auto_accept=True)
        require.deb.package(requirements.dh_virtualenv)
        require.deb.packages(requirements.project_package)
        require.directory('~/.ssh', mode='700')
        require.file('~/.ssh/config', contents=settings.ssh_config)
        # Mirror the host's git identity onto the box if present.
        gitconfig = os.path.expanduser('~/.gitconfig')
        if os.path.exists(gitconfig):
            require.file('~/.gitconfig', source=gitconfig)
        require.git.working_copy(settings.repo,
                                 branch=settings.branch,
                                 path=settings.project)
        # Fixed: used the `settings` argument, not the `Settings` class.
        yield BuildBoxEnv(settings.project)
    local('vagrant destroy -f')
def install_qgis2():
    """Install QGIS 2 under /usr/local/qgis-master.

    TODO: create one function from this and the 1.8 function above for DRY.
    """
    _all()
    add_ubuntugis_ppa()
    # Pull build dependencies declared by the distro's qgis package.
    sudo('apt-get build-dep qgis')
    fabtools.require.deb.package('cmake-curses-gui')
    fabtools.require.deb.package('git')
    clone_qgis(branch='master')
    code_base = '/home/%s/dev/cpp' % env.user
    code_path = '%s/Quantum-GIS' % code_base
    build_path = '%s/build-master' % code_path
    build_prefix = '/usr/local/qgis-master'
    # Out-of-source build directory inside the checkout.
    require.directory(build_path)
    with cd(build_path):
        fabtools.require.directory(
            build_prefix, use_sudo=True, owner=env.user)
        run('cmake .. -DCMAKE_INSTALL_PREFIX=%s' % build_prefix)
        run('make install')
def installed_from_source(version=VERSION):
    """
    Require Redis to be installed from source.

    The compiled binaries will be installed in ``/opt/redis-{version}/``.
    """
    from fabtools import require

    require.user('redis', home='/var/lib/redis')

    dest_dir = '/opt/redis-%(version)s' % locals()
    require.directory(dest_dir, use_sudo=True, owner='redis')

    # Idempotence: skip the whole build if the server binary already exists.
    if not is_file('%(dest_dir)s/redis-server' % locals()):

        with cd('/tmp'):

            # Download and unpack the tarball
            tarball = 'redis-%(version)s.tar.gz' % locals()
            require.file(tarball, url='http://redis.googlecode.com/files/' + tarball)
            run('tar xzf %(tarball)s' % locals())

            # Compile and install binaries
            require.deb.package('build-essential')
            with cd('redis-%(version)s' % locals()):
                run('make')
                for filename in BINARIES:
                    # -p preserves timestamps/modes; chown hands ownership to redis.
                    sudo('cp -pf src/%(filename)s %(dest_dir)s/' % locals())
                    sudo('chown redis: %(dest_dir)s/%(filename)s' % locals())
def setup_docs_web_site(branch='master'):
    """Initialise an InaSAFE docs site where we host docs and pdf.

    :param branch: Which branch of the documentation to build.
    :type branch: str
    """
    build_docs()
    fabtools.require.deb.package('apache2')
    apache_conf_template = 'inasafe-doc.conf.templ'
    if not exists(web_directory):
        # Fixed: require.directory takes a path, not a shell command —
        # the old call created a directory literally named "mkdir -p ...".
        require.directory('%s/pdf' % web_directory, use_sudo=True, owner='web')
        # TODO: Fix perms below
        sudo('chown -R %s.%s %s' % ('web', 'web', web_directory))
    apache_path = '/etc/apache2/sites-available/'
    # Clone and replace tokens in apache conf
    local_dir = os.path.dirname(__file__)
    local_file = os.path.abspath(os.path.join(
        local_dir, 'scripts', apache_conf_template))
    context = {
        'server_name': 'inasafe.org',  # Web Url e.g. foo.com
        'web_master': '*****@*****.**',  # email of web master
        'document_root': web_directory,  # Content root .e.g. /var/www
    }
    fastprint(green('Using %s for template' % local_file))
    destination = '%s/inasafe-docs.conf' % apache_path
    upload_template(
        local_file, destination, context=context, use_sudo=True)
    with cd(code_path):
        # Copy built Documentation to the Webserver path
        run('cp -r docs/output/html/* %s' % web_directory)
        run('cp -r docs/output/pdf %s' % web_directory)
        run('cp scripts/.htaccess %s' % web_directory)
        run('cp scripts/directory*.html %s/en/_static/' % web_directory)
    # Add a hosts entry for local testing - only really useful for localhost
    hosts = '/etc/hosts'
    if not contains(hosts, 'inasafe-docs'):
        append(hosts, '127.0.0.1 inasafe-doc.localhost', use_sudo=True)
    require.apache.enable('inasafe-docs')
    require.apache.disable('default')
    sudo('a2enmod rewrite')
    restart('apache2')
def configure_supervisor():
    """Install the router_log_parser supervisor config if it is missing,
    then reload supervisor's configuration."""
    directory(remote_config_dir, use_sudo=True)
    # Idiomatic negation (was `exists(...) is False`).
    if not exists(os.path.join(remote_config_dir, "router_log_parser.conf")):
        with lcd(local_config_dir):
            with cd(remote_config_dir):
                put("./router_log_parser.conf", "./", use_sudo=True)
        # Reload only when a new config was uploaded.
        supervisor.update_config()
def setup_remotely():
    """Set up the container on a remote server - uses nested fabgis calls.

    Use this task when you want to set up the container on a remote server.
    It will log in to the server and then run fabric in a shell session so
    that all requests appear to originate locally. This is a work around for
    current inability to tunnel cleanly into the docker container from
    outside the docker's host.
    """
    with cd(work_dir):
        # Minimal requirements for the remote-side fabric venv.
        run('echo "fabgis" > requirements.txt')
        setup_venv(work_dir)
        container_id = current_docker_container()
        if container_id is None:
            container_id = create_docker_container(image='fabgis/sshd')
        # Discover the host port mapped to the container's sshd (22).
        port_mappings = get_docker_port_mappings(container_id)
        ssh_port = port_mappings[22]
        # Fetch the fabfile and its templates straight from GitHub.
        run('wget -O fabfile.py https://github.com/AIFDR/inasafe-doc'
            '/raw/master/fabfile.py')
        require.directory('scripts')
        run('wget -O scripts/inasafe-doc.conf.templ https://github'
            '.com/AIFDR/inasafe-doc/raw/master/scripts/inasafe-doc.conf.templ')
        run('wget -O scripts/inasafe.org.mod_proxy.conf.templ https://github'
            '.com/AIFDR/inasafe-doc/raw/master/scripts/inasafe.org.mod_proxy.'
            'conf.templ')
        # Nested fabric runs targeting the container over the mapped port.
        run('venv/bin/fab -H root@%s:%i setup_web_user' % (env.host, ssh_port))
        run('venv/bin/fab -H web@%s:%i setup_docs_web_site' % (env.host, ssh_port))
def enable_app_server(*args, **kwargs):
    # require directory env.scow.project_var_dir exists, owner www-data
    proc_name = env.scow.project_tagged
    # Resolve paths inside the project's virtualenvwrapper environment.
    with prefix('workon ' + env.scow.project_tagged):
        uwsgi_bin = run('echo $VIRTUAL_ENV/bin/uwsgi')
        uwsgi_logfile = run('echo `cat $VIRTUAL_ENV/$VIRTUALENVWRAPPER_PROJECT_FILENAME`/var/log/uwsgi.log')
    web_user = '******'
    # Build the uwsgi command line; split()/join collapses the whitespace.
    uwsgi_cmd=' '.join('''
        {uwsgi_bin}
        --socket {socket_path}
        --module {wsgi_app_module}
        --uid {web_user}
        --master
        --logto {uwsgi_logfile}
    '''.format(
        #process_name=env.scow.project_tagged,
        uwsgi_bin=uwsgi_bin,
        socket_path=path.join(env.scow.project_var_dir, 'uwsgi.sock'),
        wsgi_app_module=env.project.WSGI_APP_MODULE,
        web_user=web_user,
        uwsgi_logfile=uwsgi_logfile,
    ).split())
    require.directory(env.scow.project_var_dir, owner=web_user)
    # Register and start the uwsgi process under supervisor.
    require.supervisor.process(
        proc_name,
        command=uwsgi_cmd,
        user=web_user,
    )
    supervisor.start_process(proc_name)
def create_downloads(app):
    """Create every configured download artifact for *app*."""
    download_path = str(app.download_dir)
    # Open the directory wide so the export script can write into it.
    require.directory(download_path, use_sudo=True, mode='777')
    # run the script to create the exports from the database as glottolog3 user
    run_script.execute_inner(app, 'create_downloads')
    # Restore sane permissions once the exports are in place.
    require.directory(download_path, use_sudo=True, mode='755')
def set_project_settings_class(settings_class, *args, **kwargs):
    """Record the project's Django settings class under VAR_DIR/env."""
    # TODO: Abstract something
    #print('***ONE')
    env_dir = path.join(env.scow.dirs.VAR_DIR, 'env')
    require.directory(env_dir)
    marker = path.join(env_dir, 'DJANGO_SETTINGS_CLASS')
    require.files.file(marker, contents=str(settings_class))
    print('***TWO')
def livebuild():
    """Prepare a live-build host with a local apt-cacher-ng package proxy."""
    pkg('openssh-server live-build python git-core apt-cacher-ng zsh', 'debootstrap')
    # Enable and start the package cache daemon.
    run('update-rc.d apt-cacher-ng defaults')
    service.start('apt-cacher-ng')
    require.directory('/var/build/')
    # Point interactive shells at the local cache (port 3142).
    for conf in [ '/etc/profile', '/root/.bashrc', '/root/.zshrc' ]:
        run('echo "export http_proxy=http://localhost:3142/" >> %s' % conf)
def set_project_settings_class(settings_class, *args, **kwargs):
    """Record the project's Django settings class under VAR_DIR/env."""
    # TODO: Abstract something
    #print('***ONE')
    require.directory(path.join(env.scow.dirs.VAR_DIR, 'env'))
    # Marker file read elsewhere to select the active settings class.
    require.files.file(path.join(env.scow.dirs.VAR_DIR, 'env', 'DJANGO_SETTINGS_CLASS'),
                       contents=str(settings_class))
    print('***TWO')
def setup_docs_web_proxy():
    """Set up a mod proxy based vhost to forward web traffic to internal host.

    If container_id is none, it will also install docker and set up the
    entire documentation web site inside that docker container.
    """
    require.directory(work_dir)
    with cd(work_dir):
        run('echo "fabgis" > requirements.txt')
        setup_venv(work_dir)
        container_id_file = 'fabgis.container.id'
        # Bootstrap docker + the docs site if no container exists yet.
        if not exists(container_id_file):
            setup_docker()
            setup_remotely()
        container_id = current_docker_container()
        # Host port mapped to the container's HTTP port (80).
        port_mappings = get_docker_port_mappings(container_id)
        http_port = port_mappings[80]
        fabtools.require.deb.package('apache2')
        sudo('a2enmod proxy proxy_http')
        context = {
            'internal_host': env.host,
            'internal_port': http_port,
            'server_name': 'inasafe.org'
        }
        apache_conf_template = 'inasafe.org.mod_proxy.conf.templ'
        apache_path = '/etc/apache2/sites-available'
        # Clone and replace tokens in apache conf
        local_dir = os.path.dirname(__file__)
        local_file = os.path.abspath(os.path.join(
            local_dir, 'scripts', apache_conf_template))
        fastprint(green('Using %s for template' % local_file))
        destination = '%s/inasafe.org.conf' % apache_path
        upload_template(
            local_file, destination, context=context, use_sudo=True)
        require.apache.enable('inasafe.org')
        restart('apache2')
def setup_postgresql_backups(instance_name, aws_s3_prefix, aws_access_key_id, aws_secret_access_key, backup_script=None, backup_config=None):
    """
    Setup daily SQL dumps of the Postgres server to Amazon S3

    :param instance_name: suffix used to namespace the backup config/cron.
    :param aws_s3_prefix: target S3 bucket/prefix; empty disables setup.
    :param aws_access_key_id: AWS credentials for s3cmd.
    :param aws_secret_access_key: AWS credentials for s3cmd.
    :param backup_script: optional override for DEFAULT_BACKUP_SCRIPT.
    :param backup_config: optional override for DEFAULT_BACKUP_CONFIG.
    """
    backup_script = backup_script if backup_script is not None else DEFAULT_BACKUP_SCRIPT
    backup_config = backup_config if backup_config is not None else DEFAULT_BACKUP_CONFIG

    puts(green('Setting up Postgresql backups'))

    # Cleanup old config files
    sudo('rm -f /etc/default/pg_backup')
    sudo('rm -f /etc/cron.d/pg_backup')

    # Skip if backups are not enabled
    if not env.enable_backups:
        puts('Backups not enabled, skipping...')
        return

    # Skip if backups are not configured
    if not aws_s3_prefix:
        puts('Target bucket not configured, skipping...')
        return

    setup_s3cmd(aws_access_key_id, aws_secret_access_key)

    require.deb.package('tree')

    # Postgres backup script
    backup_script_path = '/usr/local/bin/pg_backup_rotated.sh'
    config_name = 'pg_backup_{}'.format(instance_name)
    require.file(
        backup_script_path,
        contents=backup_script.format(config_name=config_name),
        owner='root',
        group='root',
        mode='0755',
        use_sudo=True
    )

    # Postgres backup configuration file
    require.file(
        '/etc/default/{}'.format(config_name),
        contents=backup_config.format(aws_s3_prefix=aws_s3_prefix),
        owner='root',
        group='root',
        mode='0644',
        use_sudo=True
    )

    # Backups directory
    require.directory('/var/backups/postgresql', owner='postgres', use_sudo=True)

    # Add crontab entry: nightly at 01:50 as the postgres user.
    fabtools.cron.add_task(config_name, '50 1 * * *', 'postgres', backup_script_path)
def install_nodejs():
    """
    Test low level API

    Exercises fabtools.nodejs: install from source, global and local npm
    package install/uninstall, and dependency installation from a
    package.json file.
    """
    from fabtools import nodejs
    from fabtools import require
    from fabtools.files import is_file

    # Install Node.js from source
    if nodejs.version() != nodejs.DEFAULT_VERSION:
        nodejs.install_from_source()
    assert is_file('/usr/local/bin/node')
    assert nodejs.version() == nodejs.DEFAULT_VERSION

    # Install / uninstall global package
    if not nodejs.package_version('underscore'):
        nodejs.install_package('underscore', version='1.4.2')
    assert nodejs.package_version('underscore') == '1.4.2'
    assert is_file('/usr/local/lib/node_modules/underscore/underscore.js')
    nodejs.uninstall_package('underscore')
    assert nodejs.package_version('underscore') is None
    assert not is_file('/usr/local/lib/node_modules/underscore/underscore.js')

    # Install / uninstall local package
    if not nodejs.package_version('underscore', local=True):
        nodejs.install_package('underscore', version='1.4.2', local=True)
    assert is_file('node_modules/underscore/underscore.js')
    assert nodejs.package_version('underscore', local=True) == '1.4.2'
    nodejs.uninstall_package('underscore', local=True)
    assert nodejs.package_version('underscore', local=True) is None
    assert not is_file('node_modules/underscore/underscore.js')

    # Install dependencies from package.json file
    require.directory('nodetest')
    with cd('nodetest'):
        require.file('package.json', contents=json.dumps({
            'name': 'nodetest',
            'version': '1.0.0',
            'dependencies': {
                'underscore': '1.4.2'
            }
        }))
        nodejs.install_dependencies()
        assert is_file('node_modules/underscore/underscore.js')
        assert nodejs.package_version('underscore', local=True) == '1.4.2'
def _add_env(venv):
    """Create the togile install dir, its virtualenv *venv*, and check out
    (or update) the togile git repository into it."""
    # Add TOGILE path
    require.directory(tg.TOGILE_PATH, owner=USER, use_sudo=True)
    # Add venv
    require.python.virtualenv(venv, user=USER, use_sudo=True)
    # Pull/Update GIT Repo
    with cd(tg.TOGILE_PATH):
        require.git.working_copy(tg.TOGILE_REPO, user=USER, use_sudo=True)
def mysql_dump():
    """
    Runs mysqldump. Result is stored at /srv/active/sql/

    The dump file is named <db_name>-<YYYY.MM.DD-HH.MM>.sql. No-op if the
    database does not exist.
    """
    if fabtools.mysql.database_exists(env.db_name):
        dump_dir = '/srv/active/sql/'
        require.directory(dump_dir, use_sudo=True)
        now = datetime.now().strftime("%Y.%m.%d-%H.%M")
        # NOTE(review): the root password appears on the command line and is
        # therefore visible in the remote process list — consider an option file.
        sudo('mysqldump -u root -p%s %s > %s' % (env.db_root_password,
                                                 env.db_name,
                                                 os.path.join(dump_dir, '%s-%s.sql' % (env.db_name, now))))
def pip_wraper():
    """Set up pip and virtualenvwrapper config for the 'web' user.

    NOTE(review): the function name is misspelled ('wraper') but kept for
    existing callers, and the lcd() path is hard-coded to a developer's
    home directory — confirm both are intentional.
    """
    with lcd('/home/ihor/.pip'):
        require.directory('~/.pip', owner='web')
        #require.file('~/.pip/pip.conf', source='pip.conf')
        # Shell snippet appended as the remote user's .bashrc.
        text = """
export WORKON_HOME=$HOME/.virtualenvs
export PROJECT_HOME=$HOME/Devel
source /usr/local/bin/virtualenvwrapper.sh
"""
        require.directory('~/.virtualenvs', owner='web')
        require.file('~/.bashrc', contents=text)
def setup_docs_web_site(branch='master'):
    """Initialise an InaSAFE docs site where we host docs and pdf.

    :param branch: Which branch of the documentation to build.
    :type branch: str
    """
    build_docs()
    fabtools.require.deb.package('apache2')
    apache_conf_template = 'inasafe-doc.conf.templ'
    if not exists(web_directory):
        # Fixed: require.directory takes a path, not a shell command —
        # the old call created a directory literally named "mkdir -p ...".
        require.directory('%s/pdf' % web_directory, use_sudo=True, owner='web')
        # TODO: Fix perms below
        sudo('chown -R %s.%s %s' % ('web', 'web', web_directory))
    apache_path = '/etc/apache2/sites-available/'
    # Clone and replace tokens in apache conf
    local_dir = os.path.dirname(__file__)
    local_file = os.path.abspath(
        os.path.join(local_dir, 'scripts', apache_conf_template))
    context = {
        'server_name': 'inasafe.org',  # Web Url e.g. foo.com
        'web_master': '*****@*****.**',  # email of web master
        'document_root': web_directory,  # Content root .e.g. /var/www
    }
    fastprint(green('Using %s for template' % local_file))
    destination = '%s/inasafe-docs.conf' % apache_path
    upload_template(local_file, destination, context=context, use_sudo=True)
    with cd(code_path):
        # Copy built Documentation to the Webserver path
        run('cp -r docs/output/html/* %s' % web_directory)
        run('cp -r docs/output/pdf %s' % web_directory)
        run('cp scripts/.htaccess %s' % web_directory)
        run('cp scripts/directory*.html %s/en/_static/' % web_directory)
    # Add a hosts entry for local testing - only really useful for localhost
    hosts = '/etc/hosts'
    if not contains(hosts, 'inasafe-docs'):
        append(hosts, '127.0.0.1 inasafe-doc.localhost', use_sudo=True)
    require.apache.enable('inasafe-docs')
    require.apache.disable('default')
    sudo('a2enmod rewrite')
    restart('apache2')
def setup_master():
    '''
    Install and configure salt master at specified host
    '''
    require.deb.packages(['salt-master'])
    # NOTE(review): owner 'aert' is hard-coded — confirm this matches the
    # deployment user on all target hosts.
    require.directory('/srv/salt', owner='aert', use_sudo=True)
    master_conf = "/etc/salt/master"
    # Append the file_roots block; fabric's append() skips lines that
    # already exist, keeping this idempotent.
    append(master_conf, "file_roots:", use_sudo=True)
    append(master_conf, "  base:", use_sudo=True)
    append(master_conf, "    - /srv/salt", use_sudo=True)
    require.service.restarted('salt-master')
def setup_docs_web_proxy():
    """Set up a mod proxy based vhost to forward web traffic to internal host.

    If container_id is none, it will also install docker and set up the
    entire documentation web site inside that docker container.
    """
    require.directory(work_dir)
    with cd(work_dir):
        run('echo "fabgis" > requirements.txt')
        setup_venv(work_dir)
        container_id_file = 'fabgis.container.id'
        # Bootstrap docker + the docs site if no container exists yet.
        if not exists(container_id_file):
            setup_docker()
            setup_remotely()
        container_id = current_docker_container()
        # Host port mapped to the container's HTTP port (80).
        port_mappings = get_docker_port_mappings(container_id)
        http_port = port_mappings[80]
        fabtools.require.deb.package('apache2')
        sudo('a2enmod proxy proxy_http')
        context = {
            'internal_host': env.host,
            'internal_port': http_port,
            'server_name': 'inasafe.org'
        }
        apache_conf_template = 'inasafe.org.mod_proxy.conf.templ'
        apache_path = '/etc/apache2/sites-available'
        # Clone and replace tokens in apache conf
        local_dir = os.path.dirname(__file__)
        local_file = os.path.abspath(
            os.path.join(local_dir, 'scripts', apache_conf_template))
        fastprint(green('Using %s for template' % local_file))
        destination = '%s/inasafe.org.conf' % apache_path
        upload_template(local_file, destination, context=context, use_sudo=True)
        require.apache.enable('inasafe.org')
        restart('apache2')
def upload_local_folder():
    """Tar the local project root, upload it to env.repo_path on the remote
    host, and unpack it there, replacing any previous contents."""
    with lcd(env.root):
        # Bundle the whole local tree into a single archive for transfer.
        local("tar cfz /tmp/release-bundle.tgz " + "./*")
    # Wipe the previous release before unpacking the new one.
    sudo('rm -rf %(repo_path)s/*' % env)
    put("/tmp/release-bundle.tgz", "%(repo_path)s" % env, use_sudo=True)
    with cd('%(repo_path)s' % env):
        sudo("tar xf release-bundle.tgz")
    put(env.root + "requirements.txt", env.repo_path, use_sudo=True)
    # Static dir must be writable by the web server user.
    require.directory('%(repo_path)s/static' % env, owner="www-data", use_sudo=True)
def test_initial_owner_requirement(users):
    """Check require.directory creates a new directory with the requested owner."""
    from fabtools.require import directory
    try:
        directory('testdir', owner='testuser', use_sudo=True)
        assert is_dir('testdir')
        assert owner('testdir') == 'testuser'
    finally:
        # Always clean up the fixture directory, even on assertion failure.
        run_as_root('rmdir testdir')
def deploy_celery():
    """Deploy celery service scripts to appropriate location.

    Installs the init script and its /etc/default config (templated with the
    app name/dir), registers the service, and ensures the app's log and run
    directories exist with the right ownership.
    """
    # Removed no-op `% env` formatting: these source paths contain no
    # placeholders, so the interpolation did nothing.
    require.file('/etc/init.d/%(app_name)s-celeryd' % env,
                 source='deploy/celeryd',
                 use_sudo=True, mode='755')
    require.files.template_file(
        '/etc/default/%(app_name)s-celeryd' % env,
        template_source='deploy/celeryd-default',
        context=dict(app_name=env.app_name, app_dir=env.app_dir),
        use_sudo=True
    )
    sudo('update-rc.d %(app_name)s-celeryd defaults' % env)
    # Log and pid directories, writable by the app user / www-data group.
    require.directory('/var/log/%(app_name)s' % env, use_sudo=True,
                      owner=env.app_user, group='www-data')
    require.directory('/var/run/%(app_name)s' % env, use_sudo=True,
                      owner=env.app_user, group='www-data')
def test_directory_creation():
    """Check require.directory creates a directory owned by the current user."""
    from fabtools.require import directory
    try:
        directory('testdir')
        assert is_dir('testdir')
        assert owner('testdir') == env.user
    finally:
        # Always clean up the fixture directory, even on assertion failure.
        run('rmdir testdir')
def nominatim():
    """Download, build, and configure Nominatim, import OSM data, and
    publish the website under /var/www/nominatim behind Apache.

    PostgreSQL is tuned for import mode during the data load and reset to
    normal mode at the end.
    """
    pgconfig(for_import=True)
    with cd('/opt/osm'):
        nominatime_archive = 'Nominatim-%s.tar.bz2' % config.NOMINATIM_VERSION
        nominatim_url = 'http://www.nominatim.org/release/'\
            + nominatime_archive
        require.file(url=nominatim_url, use_sudo=True, owner=config.GIS_USER)
        # Derive the top-level directory name from the archive listing.
        nominatim_dir = sudo('''tar tf %s | sed -e 's@/.*@@' | uniq''' % nominatime_archive,
                             user=config.GIS_USER)
        sudo('''tar xvf ''' + nominatime_archive, user=config.GIS_USER)
        with cd(nominatim_dir):
            # Standard autotools build, run as the GIS user.
            sudo('./autogen.sh', user=config.GIS_USER)
            sudo('./configure', user=config.GIS_USER)
            sudo('make', user=config.GIS_USER)
            context = {
                'db_name': config.GIS_DB,
                'db_user': config.GIS_USER,
                # NOTE(review): key is spelled 'db_passowrd' — it must match
                # the placeholder in templates/local.php; confirm before fixing.
                'db_passowrd': config.GIS_PASSWORD,
            }
            require.files.template_file(path='settings/local.php',
                                        template_source='templates/local.php',
                                        context=context,
                                        use_sudo=True,
                                        owner=config.GIS_USER)
            with cd('data'):
                # Wikipedia importance data used for search ranking.
                wiki_urls = [
                    'http://www.nominatim.org/data/wikipedia_article.sql.bin',
                    'http://www.nominatim.org/data/wikipedia_redirect.sql.bin'
                ]
                for url in wiki_urls:
                    require.file(url=url, use_sudo=True, owner=config.GIS_USER)
            # Main import: 3/4 of RAM (in MB) as osm2pgsql cache.
            sudo('./utils/setup.php --osm-file %s --all --osm2pgsql-cache %d'
                 % (pbf_path(), config.RAM_SIZE / 4 * 3), user=config.GIS_USER)
            # Special phrases (per-country and wiki-derived).
            sudo('./utils/specialphrases.php --countries > sp_countries.sql',
                 user=config.GIS_USER)
            sudo('psql -d %s -f sp_countries.sql' % config.GIS_DB,
                 user=config.GIS_USER)
            sudo('./utils/specialphrases.php --wiki-import > sp.sql',
                 user=config.GIS_USER)
            sudo('psql -d %s -f sp.sql' % config.GIS_DB, user=config.GIS_USER)
            # Generate the web frontend and serve it via Apache.
            require.directory('/var/www/nominatim', mode='755',
                              owner=config.GIS_USER, use_sudo=True)
            sudo('./utils/setup.php --create-website /var/www/nominatim',
                 user=config.GIS_USER)
            require.apache.site('200-nominatim.conf',
                                template_source='templates/200-nominatim.conf')
            require.service.restarted('apache2')
    pgconfig(for_import=False)
def fs():
    """ Create some empty dirs for the static and config files. """
    # Every env key ending in 'dir' (except local ones) names a remote
    # directory that must exist and be owned by the deploy user.
    for key, value in env.items():
        if not key.endswith('dir') or 'local' in key:
            continue
        require.directory(value, owner=env.user)

    # Seed local_settings.py once with a stage-specific import header.
    if not exists(env.settings_dir / 'local_settings.py'):
        header = ('# -*- coding: utf-8 -*-\n\n'
                  'from __future__ import unicode_literals, '
                  'absolute_import\n\n'
                  'from .{} import *\n\n').format(env.stage)
        append(env.local_settings, header)
def get_webvirt():
    """ Clone WebVirtMgr and Add it to installation location """
    install_path = fsettings.INSTALL_PATH
    require.directory(install_path, use_sudo=True)
    with cd(install_path):
        require.git.working_copy(fsettings.REPO_URL, use_sudo=True)
    with cd(os.path.join(install_path, "webvirtmgr")):
        install_requirements("requirements.txt", use_sudo=True)
        sudo("python manage.py syncdb")  # --noinput and load fixtures?!
        sudo("python manage.py collectstatic --noinput")  # just say yes!
def require_venv(directory, require_packages=None, assets_name=None, requirements=None):
    """Ensure a python3 virtualenv at *directory*, optionally installing
    packages / a requirements file and building webassets inside it."""
    venv_path = str(directory)
    require.directory(venv_path, use_sudo=True)
    # -H sets HOME so pip's log/cache land in root's home, not the user's
    with settings(sudo_prefix=env.sudo_prefix + ' -H'):
        require.python.virtualenv(venv_path, venv_python='python3', use_sudo=True)
        with python.virtualenv(venv_path):
            if require_packages:
                require.python.packages(require_packages, use_sudo=True)
            if requirements:
                require.python.requirements(requirements, use_sudo=True)
            if assets_name:
                sudo('webassets -m %s.assets build' % assets_name)
def copy_downloads(app, source_dir, pattern='*'):
    """copy downloads for the app"""
    download_dir = str(app.download_dir)
    # temporarily open the dir for writing, restore 755 when done
    require.directory(download_dir, use_sudo=True, mode='777')
    for src in pathlib.Path(source_dir).glob(pattern):
        require.file(str(app.download_dir / src.name),
                     source=src,
                     use_sudo=True,
                     owner=app.name,
                     group=app.name)
    require.directory(download_dir, use_sudo=True, mode='755')
def setup_project():
    """
    Require working copy of project cloned from remote repository.
    """
    require.directory(env.project_dir,
                      use_sudo=True,
                      owner=env.user,
                      group="www-data")
    with cd(env.project_dir):
        require.git.working_copy(env.config["repository"], directly=True)
        # secrets.json is generated from the env, not checked in
        secrets_path = os.path.join(env.project_dir,
                                    env.config["project"],
                                    "secrets.json")
        require.file(secrets_path, json.dumps(env.secrets))
def install(version, sqlpass):
    """
    Seafile installation from the official documentation
    http://manual.seafile.com/deploy/using_mysql.html

    Ex: fab -H [email protected] install:version=4.1.0,sqlpass=sqlpass

    :param version: Seafile server version to install (e.g. '4.1.0').
    :param sqlpass: password to set for the MySQL root account.
    """
    # Pick the tarball flavour matching the remote architecture
    arch = system.get_arch()
    archprefix = "x86-64" if arch == "x86_64" else "i386"

    # Commons vars
    destinstall = "/opt/seafile"
    serverfile = 'seafile-server_%(version)s_%(archprefix)s.tar.gz' % locals()

    # Up to date
    require.deb.update_index()

    # Requirements for installation
    require.deb.packages([
        'python2.7',
        'python-setuptools',
        'python-imaging',
        'python-mysqldb',
    ])

    # Install and set MySQL password
    require.mysql.server(version='5.5', password=sqlpass)

    # Download a server file
    with cd("/tmp"):
        require.file(
            url='https://bitbucket.org/haiwen/seafile/downloads/%(serverfile)s' % locals())

    # Prepare Install
    require.directory(destinstall)
    with cd(destinstall):
        utils.run_as_root('mv /tmp/seafile-server_* .')
        # FIX: dropped the original "% locals()" on these two commands —
        # the strings contain no format placeholders, so it was a no-op
        # that would raise ValueError if a literal '%' ever appeared.
        utils.run_as_root('tar -xvzf seafile-server_*')
        require.directory('installed')
        utils.run_as_root('mv seafile-server_* installed')

    # Run the interactive MySQL setup script
    with cd('%(destinstall)s/seafile-server-%(version)s' % locals()):
        utils.run_as_root('./setup-seafile-mysql.sh')
def install(version, sqlpass):
    """
    Seafile installation from the official documentation
    http://manual.seafile.com/deploy/using_mysql.html

    Ex: fab -H [email protected] install:version=4.1.0,sqlpass=sqlpass

    :param version: Seafile server version to install (e.g. '4.1.0').
    :param sqlpass: password to set for the MySQL root account.
    """
    # Pick the tarball flavour matching the remote architecture
    arch = system.get_arch()
    archprefix = "x86-64" if arch == "x86_64" else "i386"

    # Commons vars
    destinstall = "/opt/seafile"
    serverfile = 'seafile-server_%(version)s_%(archprefix)s.tar.gz' % locals()

    # Up to date
    require.deb.update_index()

    # Requirements for installation
    require.deb.packages([
        'python2.7',
        'python-setuptools',
        'python-imaging',
        'python-mysqldb',
    ])

    # Install and set MySQL password
    require.mysql.server(version='5.5', password=sqlpass)

    # Download a server file
    with cd("/tmp"):
        require.file(
            url='https://bitbucket.org/haiwen/seafile/downloads/%(serverfile)s' % locals()
        )

    # Prepare Install
    require.directory(destinstall)
    with cd(destinstall):
        utils.run_as_root('mv /tmp/seafile-server_* .')
        # FIX: dropped the original "% locals()" on these two commands —
        # the strings contain no format placeholders, so it was a no-op
        # that would raise ValueError if a literal '%' ever appeared.
        utils.run_as_root('tar -xvzf seafile-server_*')
        require.directory('installed')
        utils.run_as_root('mv seafile-server_* installed')

    # Run the interactive MySQL setup script
    with cd('%(destinstall)s/seafile-server-%(version)s' % locals()):
        utils.run_as_root('./setup-seafile-mysql.sh')
def instance(name, version=VERSION, **kwargs):
    """
    Require a Redis instance to be running

    The instance will be managed using supervisord.

    :param name: instance name; used to derive the config file, log file,
        dump file and supervisor process names.
    :param version: Redis version to build/install from source.
    :param kwargs: extra redis.conf settings; each key becomes a config
        directive, list values emit one line per element.
    """
    from fabtools import require

    installed_from_source(version)

    # Standard directory layout, all owned by the 'redis' user
    require.directory('/etc/redis', use_sudo=True, owner='redis')
    require.directory('/var/db/redis', use_sudo=True, owner='redis')
    require.directory('/var/log/redis', use_sudo=True, owner='redis')
    require.directory('/var/run/redis', use_sudo=True, owner='redis')

    # Required for background saving
    with settings(warn_only=True):
        require.system.sysctl('vm.overcommit_memory', '1')

    # Set default parameters (caller kwargs win over the defaults)
    params = {}
    params.update(kwargs)
    params.setdefault('bind', '127.0.0.1')
    params.setdefault('port', '6379')
    params.setdefault('logfile', '/var/log/redis/redis-%(name)s.log' % locals())
    params.setdefault('loglevel', 'verbose')
    params.setdefault('dbfilename', '/var/db/redis/redis-%(name)s-dump.rdb' % locals())
    params.setdefault('save', ['900 1', '300 10', '60 10000'])

    # Build config file from parameters
    # (keys such as 'save' may result in multiple config lines)
    lines = []
    for key, value in sorted(params.items()):
        if isinstance(value, list):
            for elem in value:
                lines.append("%s %s" % (key, elem))
        else:
            lines.append("%s %s" % (key, value))

    redis_server = '/opt/redis-%(version)s/redis-server' % locals()
    config_filename = '/etc/redis/%(name)s.conf' % locals()

    # Upload config file; the watch() context flags a restart only when
    # the remote file actually changed (dict used so the closure can
    # mutate shared state).
    context = dict(need_restart=False)

    def on_change():
        context['need_restart'] = True

    with watch(config_filename, True, on_change):
        require.file(config_filename, contents='\n'.join(lines),
                     use_sudo=True, owner='redis')

    # Use supervisord to manage process
    # NOTE(review): user='******' looks like a redacted value — restore
    # the real supervisor run-as user before deploying.
    process_name = 'redis_%s' % name
    require.supervisor.process(process_name,
                               user='******',
                               directory='/var/run/redis',
                               command="%(redis_server)s %(config_filename)s" % locals())

    # Restart only if the config changed while the process already existed
    if context['need_restart']:
        fabtools.supervisor.restart_process(process_name)
def setup_web():
    """Provision the web tier: Oracle JDK + Tomcat fronted by nginx."""
    require.oracle_jdk.installed(version='8u25-b17')
    require.tomcat.installed(version='8.0.14')
    # FIX: the original did `if not require.service.started():` (no service
    # name — a TypeError) and then called require.service.start(), which
    # does not exist in fabtools. require.service.started('tomcat') is
    # idempotent and covers both the check and the start.
    require.service.started('tomcat')
    require.nginx.server()
    require.nginx.site(env.project_name,
                       template_source='nginx-superrocket.site',
                       port=80,
                       server_alias='',
                       static_path=env.static_path)
    require.nginx.disable('default')
    require.directory(env.home + '/logs')
def stop(app, maintenance_hours=1):
    """pause app by changing the supervisord config

    create a maintenance page giving a date when we expect the service
    will be back

    :param maintenance_hours: Number of hours we expect the downtime to last.
    """
    # maintenance_hours=None means: pause silently, no 503 page
    if maintenance_hours is not None:
        require.directory(str(app.www_dir), use_sudo=True)
        eta = helpers.strfnow(add_hours=maintenance_hours)
        sudo_upload_template('503.html',
                             dest=str(app.www_dir / '503.html'),
                             app_name=app.name,
                             timestamp=eta)
    require_supervisor(app.supervisor, app, pause=True)
    supervisor.update_config()
    service.reload('nginx')
def setup_tilestream():
    """Set up tile stream - see https://github.com/mapbox/tilestream.

    Tilestream is a nodejs application, and node is notoriously loose
    about api compatibility between releases, so a distro-packaged node
    will likely not work. We therefore use nodeenv (the node analogue of
    python's virtualenv) to pin the exact node version tilestream
    expects, e.g.::

        nodeenv env --node=0.8.15
    """
    setup_env()
    # Build/runtime prerequisites for node + tilestream
    for pkg in ('curl',
                'build-essential',
                'libssl-dev',
                'libsqlite3-0',
                'libsqlite3-dev',
                'git-core',
                'nodejs nodejs-dev npm',
                'python-pip'):
        require.deb.package(pkg)
    sudo('pip install nodeenv')

    dev_dir = '/home/%s/dev/javascript' % env.fg.user
    fastprint('making directory %s' % dev_dir)
    require.directory(dev_dir)

    tile_stream_dir = os.path.join(dev_dir, 'tilestream')
    fastprint('checkout out tilestream to %s' % tile_stream_dir)
    if not exists(tile_stream_dir):
        with cd(dev_dir):
            run('git clone http://github.com/mapbox/tilestream.git')

    with cd(tile_stream_dir):
        # Create the pinned-node environment only once
        if not exists(os.path.join(tile_stream_dir, 'env')):
            run('nodeenv env --node=0.8.15')
        # If doing this interactively from a shell you would first do:
        #   . env/bin/activate
        #   npm install
        # From our scripted environment (where we cant activate the venv):
        run('env/bin/npm install')
def remote_files():
    # Exercise fabtools remote file/directory helpers: create dirs,
    # upload files, then round-trip an archive through a temp dir.
    dirs = [
        'static',
        'templates'
    ]
    require.files.directory(dirs)  # create several directories at once
    require.directory('myapp')  # create a single directory
    with cd('myapp'):
        # NOTE(review): in Fabric 1.x, cd() also affects put()'s remote
        # path, so this likely lands in myapp/myapp/app.py — confirm
        # whether the nested path is intended.
        put('app.py', 'myapp/app.py')  # upload a file
        require.file('application.cfg', contents='TESTING=True')
    tmp = require.files.temporary_directory()
    # Pack the local myapp dir, drop the tarball in the remote temp dir,
    # then unpack it in the current remote directory.
    local('tar -zcf myapp.tar.gz myapp')
    tmp_file = path.join(tmp, 'myapp.tar.gz')
    put(tmp_file)
    run('tar -zxf %s' % tmp_file)
    run('rm -rf myapp.tar.gz')
def configure_git():
    """
    1. Setup bare Git Repo
    2. Create post-receive hook
    """
    # NOTE(review): block nesting reconstructed from a collapsed source
    # line — verify the cd/lcd scopes against the original file.
    require.directory(GIT_DIR, use_sudo=True)
    with cd(GIT_DIR):
        sudo('mkdir gage-web.git')
        with cd('gage-web.git'):
            sudo('git init --bare')
            # Upload the deploy hook from the local config dir and make
            # it executable so pushes trigger it.
            with lcd(LOCAL_CONFIG_DIR):
                with cd('hooks'):
                    put('./post-receive', './', use_sudo=True)
                    sudo('chmod +x post-receive')
    # Register the bare repo as the 'production' remote of the local clone
    with lcd(LOCAL_APP_DIR):
        local(
            'git remote add production {user}@{server}:{GIT_DIR}/gage-web.git'.
            format(user=env.user, server=env.host_string, GIT_DIR=GIT_DIR))
def install_controller(): ''' Install Cozy Controller Application Manager. Daemonize with supervisor. ''' # Check if controller is already installed, . with settings(warn_only=True): result = run('curl -X GET http://127.0.0.1:9002/') is_installed = result.find('{"error":"Wrong auth token"}') if is_installed != -1: print(green("Cozy Controller already installed")) return True sudo('npm install -g cozy-controller') require.directory('/etc/cozy', owner='root', use_sudo=True) require.directory('/etc/cozy/pids', owner='root', use_sudo=True) require.files.file(path='/etc/cozy/controller.token', mode='700', contents=TOKEN, use_sudo=True, owner='cozy-home') path = '/usr/local/lib/node_modules/cozy-controller/bin/cozy-controller' require.supervisor.process('cozy-controller', command="%s -u --auth --per 755" % path, environment='NODE_ENV="production"', user='******') supervisor.stop_process('cozy-controller') ## In case where two cozy-controllers are started with settings(warn_only=True): sudo('pkill -9 node') supervisor.start_process('cozy-controller') print('Waiting for cozy-controller to be launched...') program = 'curl -X GET http://127.0.0.1:9002/' def comparator(result): return result == '{"error":"Wrong auth token"}' # Run curl until we get the MATCH_STR or a timeout if not try_delayed_run(program, comparator): print_failed('cozy-controller') print(green('Cozy Controller successfully started'))