def upload_solr_collection():
    """Upload a pre-built Solr collection to the remote data directory."""
    # Make the tree writable so rsync can replace files in place.
    with cd(data_dir):
        sudo('chmod -R a+w .')
    rsync_project(data_dir, local_dir='deploy/production/solr_data/')
    # Lock the tree back down and hand ownership to the web-server user.
    with cd(data_dir):
        sudo('chmod -R a-w .')
        sudo('chown -R www-data:www-data project')
def checkout(url, workdir, name):
    """Clone (or update) a git repository on the remote host using a
    dedicated deploy key.

    url     -- git URL to clone
    workdir -- remote parent directory that will contain the checkout
    name    -- directory name of the checkout inside workdir
    """
    # First-time setup: install ssh config and the deploy key, then
    # tighten its permissions so ssh will accept it.
    # NOTE(review): the key source is a hard-coded local path
    # (/Users/alankang/.ssh/id_rsa) -- this only works from that machine.
    if not exists(env.deploy_key_path):
        run('mkdir -p %s/.ssh' % env.home)
        require.file(env.home + '/.ssh/config', source='_ops/ssh_config')
        require.file(env.deploy_key_path, source='/Users/alankang/.ssh/id_rsa')
        run('chmod 0600 ' + env.deploy_key_path)
    if exists(os.path.join(workdir, name)):
        # Checkout already exists: just pull the latest changes.
        with cd(os.path.join(workdir, name)):
            run("ssh-agent bash -c 'ssh-add %s; git pull'" % env.deploy_key_path)
    else:
        # Fresh clone into workdir/name, authenticating via ssh-agent.
        with cd(workdir):
            run("ssh-agent bash -c 'ssh-add %s; git clone %s %s'" % (env.deploy_key_path, url, name))
def flask_manage(instance, command):
    """Run a Flask manage.py command for the given instance.

    instance -- site name under /srv/www
    command  -- argument string passed to ``python manage.py``
    """
    dirname = "/srv/www/%s" % instance
    # Build a single shell command: export settings, activate the
    # virtualenv, then invoke manage.py.  The original used backslash
    # string continuations, which silently glued words together
    # ("python\ manage.py"); implicit literal concatenation keeps every
    # space explicit.
    cmd = ("export MAPROULETTE_SETTINGS=%s/config.py && "
           "source %s/virtualenv/bin/activate && "
           "python manage.py %s" % (dirname, dirname, command))
    with cd("%s/htdocs/maproulette" % dirname):
        sudo(cmd, user="******")
def deploy_build():
    """Build the environment remotely."""
    _copy_files()
    build_dir = 'builds/adamw523blog'
    with cd(build_dir):
        run('fig build')
def _start_node2(*args, **kwargs):
    """Install node dependencies, rewrite SOCK_URL in src/server.ts to the
    configured SERVER_LOCATION, then start the app.

    Don't use this in production! - Build a dist and deploy that.
    """
    server_dir = _get_server_dir(**kwargs)
    with cd(server_dir), shell_env(SOCK_URL=kwargs["SERVER_LOCATION"]):
        run("npm i")
        # Patch the hard-coded SOCK_URL in the TypeScript source:
        # fetch the file, substitute, push it back.
        src = "src/server.ts"
        sio = StringIO()
        get(src, sio, use_sudo=True)
        # BUG FIX: rewind before reading (get() leaves the cursor at EOF,
        # so read() returned ''), and keep the result of str.replace() --
        # strings are immutable, so the original call discarded the
        # substitution entirely and the file was written back unchanged.
        sio.seek(0)
        s = sio.read()
        s = s.replace(
            "app.set('SOCK_URL', undefined);",
            "app.set('SOCK_URL', '{SERVER_LOCATION}');".format(
                SERVER_LOCATION=kwargs["SERVER_LOCATION"]
            ),
        )
        put(StringIO(s), src, use_sudo=True)
        run("npm start")
def setup_slaves():
    # Configure this host as a PostgreSQL hot-standby slave: stop the
    # server, enable streaming-replication settings, take a fresh base
    # backup from the master, then restart.
    print "Setting up slave %s" % env.host
    with settings(warn_only=True):
        # warn_only: postgresql may not be running yet
        sudo('service postgresql stop')
    host = env.roledefs['master'][0]
    if host not in env.addresses:
        raise KeyError("could not find master IP address")
    print host
    master_ip = env.addresses[host]['eth0']
    # Streaming-replication settings appended to postgresql.conf.
    # NOTE(review): append() may add duplicate lines on repeated runs.
    append(pg_conf, 'wal_level = hot_standby', use_sudo = True)
    append(pg_conf, 'max_wal_senders = 5', use_sudo = True)
    append(pg_conf, 'wal_keep_segments = 32', use_sudo = True)
    append(pg_conf, 'hot_standby = on', use_sudo = True)
    with cd(os.path.dirname(data_path)):
        with settings(warn_only=True):
            # Keep the old data directory around, timestamped.
            sudo('mv main main.%d' % time.time() )
        print "Starting backup from master %s" % master_ip
        sudo('pg_basebackup -P -x -h %s -U project -D main' % master_ip)
        # backup_label would confuse a streaming standby start.
        sudo('rm -f main/backup_label')
        sudo('chown -R postgres:postgres main')
        print "Done."
    # recovery.conf turns the freshly restored data dir into a standby.
    append(recovery_conf, "standby_mode = 'on'", use_sudo = True)
    append(recovery_conf, "primary_conninfo = 'host=%s user=project'" % master_ip, use_sudo = True)
    sudo('service postgresql start')
def build_server_file(self):
    """Unpack the uploaded tarball and prepare writable logs/tmp dirs."""
    base = self.sys_deploy_path + self.name
    with cd(self.sys_deploy_path):
        run('tar -zxf ' + self.name + '.tar.gz', quiet=True)
        run('rm ' + self.name + '.tar.gz')
    # Both runtime directories get created and opened up the same way.
    for sub in ('/logs', '/tmp'):
        t_mkdir(base + sub)
        t_chmod(base + sub, '777', '-R')
def update_symbolic_links(build_number):
    """Point the 'config' and 'deploy' symlinks at the given build."""
    with cd(dist_base_dir):
        for link in ('config', 'deploy'):
            # Replace the link only if one already exists.
            if is_link(link, verbose=True):
                run('unlink %s' % link)
            run('ln -s ./dist/%s/%s %s' % (build_number, link, link))
def verify_checksums(build_number):
    """Verify the uploaded build files against their checksums.txt manifest."""
    print('# verify checksums -----------------------------------')
    md5cmd = "md5sum"
    with cd(dist_base_dir + '/dist/' + build_number):
        # warn_only so a checksum mismatch (non-zero exit) reaches the
        # branch below; without it Fabric aborts the whole task on failure
        # and the "bad files" message was unreachable.
        with settings(warn_only=True):
            result = run('%s -c checksums.txt' % md5cmd)
        if result.return_code != 0:
            print('It\'s not matched with checksums.txt. Something wrong... bad files...')
        else:
            print('verify OK!!')
def checkout_repo(instance, branch=None):
    """Clone the maproulette repository into the instance's htdocs tree.

    branch -- optional branch to clone; defaults to the repo default.
    """
    dirname = "/srv/www/%s/htdocs/maproulette" % instance
    repo = "https://github.com/osmlab/maproulette.git"
    if branch:
        cmd = "git clone %s -b %s %s" % (repo, branch, dirname)
    else:
        cmd = "git clone %s %s" % (repo, dirname)
    with cd("/srv/www"):
        sudo(cmd, user="******")
def deploy():
    """Deploy the snake project: checkout, render configs, build, restart."""
    # Fetch/update the source tree under ~/prjs/snake.
    checkout('[email protected]:akngs/snake.git', '%s/prjs' % env.home, 'snake')
    # Render the production Django settings from its template.
    require.files.template_file('%s/prjs/snake/djangohome/snake/settings_production.py' % env.home, template_source='djangohome/snake/settings_production.py', context=env)
    with cd('%s/prjs/snake' % env.home):
        run('make product')
    # Supervisor and nginx configuration, then enable the nginx site.
    require.files.template_file('/etc/supervisor/conf.d/snake.conf', template_source='_ops/supervisord.conf', context=env, use_sudo=True)
    require.files.template_file('/etc/nginx/sites-available/snake', template_source='_ops/nginx.conf', context=env, use_sudo=True)
    sudo('ln -f -s /etc/nginx/sites-available/snake /etc/nginx/sites-enabled/snake')
    restart()
def build_server_file(self):
    """Unpack the uploaded tarball in place and prepare logs/tmp dirs."""
    with cd(self.sys_deploy_path):
        # Clear out everything except what rm_all_except() preserves.
        rm_all_except()
        # 5.4 Extract; --strip-components 1 drops the archive's top-level
        # directory, leaving the web contents directly in the deploy path.
        sudo('tar -zxf ' + self.name + '.tar.gz --strip-components 1', quiet=True)
        sudo('rm ' + self.name + '.tar.gz')
    # Writable runtime directories for the web application.
    t_mkdir(self.sys_deploy_path + 'logs')
    t_mkdir(self.sys_deploy_path + 'tmp')
    t_chmod(self.sys_deploy_path + 'logs', '777', '-R')
    t_chmod(self.sys_deploy_path + 'tmp', '777', '-R')
def install_reqs(upgrade=False, requirements_file=None):
    """Install required packages into virtualenv.

    Default requirements_file is requirements/{env.type}.txt
    """
    if not requirements_file:
        requirements_file = 'requirements/{}.txt'.format(env.type)
    upgrade_flag = '--upgrade' if upgrade else ''
    with cd('project'):
        run('pip -q install {0} -r {1}'.format(upgrade_flag, requirements_file))
def deploy():
    """Install Oracle JDK 8u102 under remote_path and verify the install."""
    # prepare directory for jdk
    prepare_directory_if_necessary()
    # Skip upload/extract entirely when the archive is already in place.
    if not exists('%s/%s' % (remote_path, bin_filename), verbose=False):
        # put binary file to remote_path
        print 'put %s to %s' % (bin_filename, remote_path)
        put(bin_filename, remote_path, use_sudo=True)
        print 'extract jdk-8u102-linux-x64.tar.gz'
        with cd(remote_path):
            sudo('tar xvzf jdk-8u102-linux-x64.tar.gz')
            # 'jdk1.8.0' is resolved relative to remote_path (cd context).
            if exists('jdk1.8.0', verbose=True):
                print 'symbolic link exists for jdk1.8.0 so unlink that'
                sudo('unlink jdk1.8.0')
            sudo('ln -s ./jdk1.8.0_102 jdk1.8.0')
            sudo('rm -rfv jdk-8u102-linux-x64.tar.gz')
    print 'check java version that was installed...'
    with cd('%s/%s' % (remote_path, 'jdk1.8.0/bin')):
        run('./java -version')
def checkout_repo(instance, is_dev, branch=None):
    """Clone maproulette into the instance's htdocs tree.

    is_dev -- clone mvexel's fork instead of the osmlab upstream
    branch -- optional branch to clone; defaults to the repo default
    """
    dirname = "/srv/www/%s/htdocs/maproulette" % instance
    git_org = 'mvexel' if is_dev else 'osmlab'
    repo = "https://github.com/{}/maproulette.git".format(git_org)
    if branch:
        cmd = "git clone {} -b {} {}".format(repo, branch, dirname)
    else:
        cmd = "git clone {} {}".format(repo, dirname)
    with cd("/srv/www"):
        sudo(cmd, user="******")
def download_dbnsfp(genomes):
    """Back compatible download target for dbNSFP, to be moved to GGD recipes.
    """
    folder_name = "variation"
    genome_dir = os.path.join(env.data_files, "genomes")
    # dbNSFP is only provided for these human builds.
    wanted = set(["hg19", "GRCh37"])
    for orgname, gid, manager in genomes:
        if gid not in wanted or not manager.config.get("dbnsfp"):
            continue
        vrn_dir = os.path.join(genome_dir, orgname, gid, folder_name)
        if not env.safe_exists(vrn_dir):
            env.safe_run('mkdir -p %s' % vrn_dir)
        with cd(vrn_dir):
            _download_dbnsfp(env, gid, manager.config)
def setup_virtualenv(path='~/.env'):
    """Create new virtualenv, activate it in ~/.profile"""
    # Skip the download/build when the virtualenv already exists.
    if not exists(path):
        v = '1.11.4'
        tarball = 'virtualenv-' + v + '.tar.gz'
        # --insecure skips certificate validation (older curl/CA bundles).
        run('curl --insecure '
            '-O https://pypi.python.org/packages/source/v/virtualenv/' + tarball)
        run('tar xvfz ' + tarball)
        with cd('virtualenv-' + v):
            run('python virtualenv.py {}'.format(path))
    # partial=True keeps these appends idempotent across repeated runs.
    append('.profile', 'export LANG=en_US.UTF8', partial=True)
    append('.profile', '. {}/bin/activate'.format(path), partial=True)
def download_dbsnp(genomes, bundle_version, dbsnp_version):
    """Download and install dbSNP variation data for supplied genomes.
    """
    # NOTE(review): an almost identical download_dbsnp (differing only in
    # quote style) appears again later in this file; the later definition
    # shadows this one at import time.
    folder_name = "variation"
    genome_dir = os.path.join(env.data_files, "genomes")
    # Only genomes whose manager config enables dbsnp are processed.
    for (orgname, gid, manager) in ((o, g, m) for (o, g, m) in genomes
                                    if m.config.get("dbsnp", False)):
        vrn_dir = os.path.join(genome_dir, orgname, gid, folder_name)
        if not env.safe_exists(vrn_dir):
            env.safe_run("mkdir -p %s" % vrn_dir)
        with cd(vrn_dir):
            # Human builds use the GATK-bundle based download; the listed
            # model organisms get a custom dbSNP build.
            if gid in ["GRCh37", "hg19"]:
                _dbsnp_human(env, gid, manager, bundle_version, dbsnp_version)
            elif gid in ["mm10", "canFam3"]:
                _dbsnp_custom(env, gid)
def download_dbsnp(genomes, bundle_version, dbsnp_version):
    """Download and install dbSNP variation data for supplied genomes.
    """
    # NOTE(review): this duplicates an earlier download_dbsnp definition
    # (the two differ only in quote style); this later one shadows it.
    folder_name = "variation"
    genome_dir = os.path.join(env.data_files, "genomes")
    # Only genomes whose manager config enables dbsnp are processed.
    for (orgname, gid, manager) in ((o, g, m) for (o, g, m) in genomes
                                    if m.config.get("dbsnp", False)):
        vrn_dir = os.path.join(genome_dir, orgname, gid, folder_name)
        if not env.safe_exists(vrn_dir):
            env.safe_run('mkdir -p %s' % vrn_dir)
        with cd(vrn_dir):
            # Human builds use the GATK-bundle based download; the listed
            # model organisms get a custom dbSNP build.
            if gid in ["GRCh37", "hg19"]:
                _dbsnp_human(env, gid, manager, bundle_version, dbsnp_version)
            elif gid in ["mm10", "canFam3"]:
                _dbsnp_custom(env, gid)
def upload_local_public_key(use_poweruser=False, local_pub_key_path='~/.ssh/id_rsa.pub'):
    """Append local SSH public key to remote ~/.ssh/authorized_keys.

    If use_poweruser is set, uses sudo (usefull in situation before the key
    is uploaded while other login methods being disabled).

    If your system has ssh-copy-id(1), you may try to use it instead of
    this Fabric task.

    use_poweruser      -- log in as env.poweruser and sudo the remote ops
    local_pub_key_path -- path to the local public key file to upload
    """
    target_user = env.user
    target_home = '.'
    acting_user = env.user
    remote_run = run
    use_sudo = False
    if use_poweruser:
        use_sudo = True
        remote_run = sudo
        acting_user = env.poweruser
    # switch to power user to login and create key file
    # (we do not allow unprivileged user login with password)
    with settings(hide('everything'), user=acting_user, warn_only=True):
        # Resolve the target user's home directory from the passwd database.
        target_home = run("getent passwd {}"
                          "|awk -F: '{{print $6}}'".format(target_user))
        if not exists(target_home):
            print(red("User's home directory does not exist"))
            return
    pubkey_path = os.path.expanduser(local_pub_key_path)
    if not os.path.exists(pubkey_path):
        print(red("Local public key not found: {}".format(pubkey_path)))
        return
    # Keep only "type base64key", dropping any trailing comment field.
    key = ' '.join(open(pubkey_path).read().strip().split(' ')[:2])
    with settings(user=acting_user), cd(target_home):
        remote_run('mkdir -p .ssh')
        # 'append' with use_sudo duplicates lines within 'cd'.
        # https://github.com/fabric/fabric/issues/703
        # Passing 'shell=True' to append() (which is supported in
        # Fabric 1.6) fixes this issue.
        append('.ssh/authorized_keys', key, partial=True, shell=True,
               use_sudo=use_sudo)
        # sshd refuses keys with lax permissions; fix ownership last.
        remote_run('chmod 600 .ssh/authorized_keys')
        remote_run('chmod 700 .ssh')
        remote_run('chown -R {0}:{0} .ssh'.format(target_user))
def install_python3():
    """Install Python3

    Installs python3.2 from apt and builds Python 3.3.2 from source into
    /opt/python3.3, symlinking its binaries into /usr/bin.
    """
    # install python3.2
    sudo('aptitude install -y python3.2 python3.2-dev')
    # install python3.3 -- skip when a previous run already installed it.
    # BUG FIX: the original guard checked "$HOME/Python3.3.2", a path that
    # is never created (the tarball extracts to "Python-3.3.2", which is
    # deleted below), so the source build re-ran on every invocation.
    # Checking the install prefix makes the guard actually effective.
    if not exists("/opt/python3.3/bin/python3.3"):
        run('wget http://www.python.org/ftp/python/3.3.2/Python-3.3.2.tar.bz2')
        run('tar xjf Python-3.3.2.tar.bz2')
        with cd('Python-3.3.2'):
            run('./configure --prefix=/opt/python3.3')
            run('make')
            sudo('make install')
        sudo('ln -s /opt/python3.3/bin/* /usr/bin/.')
        # Clean up the tarball and the extracted source tree.
        sudo('rm -R Python-3.3.2*')
def upload_db(recreate=True):
    """Upload local database dump and restore on host.

    Drop and recreate the target database if 'recreate' argument is True.
    """
    dump = '{}.backup'.format(env.db)
    # Dump locally in pg_restore's custom format, then ship it over.
    local('pg_dump --clean -F custom '
          '-U {0} {1} > {1}.backup'.format(env.dbuser, env.db))
    put(dump, '.')
    if recreate:
        execute(drop_db)
        execute(create_db)
    # Resolve the remote home dir quietly so the restore can cd there.
    with settings(hide('stdout', 'running')):
        homedir = run('pwd')
    with settings(user=env.poweruser), cd(homedir):
        sudo('pg_restore -d {0} {0}.backup'.format(env.db), user='******')
    # Remove the local dump once the restore has gone through.
    local('/bin/rm {}.backup'.format(env.db))
def build_server_file(self):
    """Unpack the uploaded tarball in the deploy path, then drop it."""
    tarball = self.name + '.tar.gz'
    with cd(self.sys_deploy_path):
        run('tar -zxf ' + tarball, quiet=True)
        run('rm ' + tarball)
def deploy_rm():
    """ Remove the remote environment """
    build_dir = '~adam/builds/adamw523blog'
    with cd(build_dir):
        run('fig rm --force')
def deploy_kill():
    """ Kill the remote environment """
    build_dir = '~adam/builds/adamw523blog'
    with cd(build_dir):
        run('fig kill')
def deploy_up():
    """ Run the environment remotely """
    build_dir = '~adam/builds/adamw523blog'
    with cd(build_dir):
        run('fig up -d')
def extract_webapp(build_number):
    """Unzip the webapp WAR for the given build into its deploy dir."""
    build_dir = dist_base_dir + '/dist/' + build_number
    with cd(build_dir):
        run('unzip message-gateway.war -d ./deploy')
def git_change_branch(instance, branch):
    """Check out the given branch in the instance's maproulette repo."""
    repo_dir = "/srv/www/%s/htdocs/maproulette" % instance
    with cd(repo_dir):
        sudo('git checkout %s' % branch, user="******")
def git_fetch_all(instance):
    """Fetch all remotes in the instance's maproulette repo."""
    repo_dir = "/srv/www/%s/htdocs/maproulette" % instance
    with cd(repo_dir):
        sudo('git fetch --all', user="******")
def update_bower_dependencies(instance):
    """Run 'bower update' in the instance's static assets directory."""
    static_dir = "/srv/www/%s/htdocs/maproulette/maproulette/static" % instance
    with cd(static_dir):
        run('bower -q update')
def install_bower_dependencies(instance):
    """Run 'bower install' in the instance's static assets directory."""
    static_dir = "/srv/www/%s/htdocs/maproulette/maproulette/static" % instance
    with cd(static_dir):
        run('bower -q install')
def extract_config(build_number):
    """Untar the configuration archive inside the build directory."""
    cfg_dir = dist_base_dir + '/dist/' + build_number
    with cd(cfg_dir):
        run('tar xvf message-gateway-config.tar')