def deploy():
    """Deploy the built Vue app to the remote static map directory.

    Keeps the old release under /previous for rollback, then rsyncs the
    fresh build into /current.
    """
    app_root = '/srv/www/eu4/map/app'
    build_dir = path(os.getcwd(), 'vapp/')
    current = path(app_root, 'current')
    previous = path(app_root, 'previous')
    # Refuse to deploy unless the build output (with index.html) is present.
    index_file = os.path.join(build_dir, 'index.html')
    if not (os.path.exists(build_dir) and os.path.exists(index_file)):
        abort('Build directory does not exist')
    # Move old version to /previous so a rollback stays possible.
    run('cp -r {0} {1}'.format(path(current, '*'), previous))
    # scp build dir to /current.
    rsync(local_dir=build_dir, remote_dir=current, exclude=[])
    # Add versions/symlinks later.
    # No supervisor/nginx restart needed — this is static content only.
    print(
        'UPDATE INDEX.HTML to change the BASE HREF to "/map/" and ENABLE HISTORY MODE IN VueRouter'
    )
def install():
    """First-time setup of the remote host: directories, assets, virtualenv."""
    # Ensure the base directory, the ~/virtual symlink and the virtual dir.
    if not exists(remote_base):
        run(f'mkdir -p {remote_base}')
    if not exists('~/virtual'):
        run('ln -s /var/www/virtual/$USER/ ~/virtual')
    if not exists(remote_virtual):
        run(f'mkdir {remote_virtual}')
    # Upload the helper assets and the per-package config file.
    for asset, destination in (
        (asset_htaccess, join(remote_virtual, '.htaccess')),
        (asset_daemonsh, join(remote_base, 'daemon.sh')),
        (asset_configfile, join(remote_base, f'config_{package_name}.py')),
    ):
        put(get_asset(asset), destination)
    with cd(remote_base):
        run('chmod +x daemon.sh')
    # Mirror the package source, then push the pinned requirements.
    rsync(local_dir=local_package_base, exclude=rsync_ignore,
          remote_dir=remote_base + '/', delete=True)
    put(join(local_base, 'requirements.txt'), remote_base)
    with cd(remote_base):
        # First install only: create the Python 2.7 virtualenv and gunicorn.
        if not exists(remote_base + '/env'):
            run('virtualenv-2.7 env --distribute')
            run(f'env/bin/pip install -U gunicorn {gunicorn_requirements}')
        run('env/bin/pip install -r ./requirements.txt')
    run(f'cp -R {remote_package_base}/static {remote_virtual}')
def provision():
    """Bootstrap the host with chef and converge the application recipe."""
    src = os.path.join(os.path.dirname(env.real_fabfile), 'provision')
    # Fetch the chef package once into the local .local cache.
    if not os.path.exists(src + '/.local/chef.deb'):
        local('mkdir -p ' + src + '/.local')
        with lcd(src + '/.local'):
            local('curl -fL ' + env.chef_url + ' -o chef.deb')
    with hide('running'):
        sudo('mkdir -p ' + env.provision_dir)
        sudo('chown -R ' + env.user + ': ' + env.provision_dir)
        rsync(
            default_opts='-a',
            ssh_opts=env.ssh_opts,
            delete=True,
            exclude='nodes',
            local_dir=src + '/',
            remote_dir=env.provision_dir + '/')
    # Install the chef client.
    with cd(env.provision_dir + '/.local'):
        # Check whether chef is already installed.
        if run('dpkg -l chef', quiet=True).failed:
            sudo('dpkg -i chef.deb')
    # Run the chef recipe on the server.
    with cd(env.provision_dir):
        recipe = 'recipe[' + env.application + ']'
        sudo('chef-client -z -c client.rb -E %s -o %s' % (env.runtime, recipe))
def deploy(*directories):
    """Deploy the scheduler webapp(s) to tomcat.

    Accepts explicit webapp directory names, or one pseudo-target:
    'all' (stable + dev copies), 'all-dev' (the 'x'-suffixed dev copies
    only) or 'all-stable' (the plain department names only).
    """
    #build()
    if len(directories) == 1:
        if directories[0] == 'all':
            # We need a copy since we're modifying the list.
            directories = list(departments)
            directories.extend([department+'x' for department in departments])
        elif directories[0] == 'all-dev':
            directories = [department+'x' for department in departments]
        elif directories[0] == 'all-stable':
            directories = departments
    local('mkdir -p test/scheduler/view/web/share')
    for directory in directories:
        print('Deploying %s...' % directory)
        # Write a per-deployment scheduler.properties into the war before
        # uploading: the database path embeds the target directory name.
        # NOTE(review): line breaks inside this properties payload were
        # reconstructed at the obvious property boundaries — confirm against
        # the original file.
        local('''echo "#this required property tells the scheduler where to write its database file to.
databasefilepath=/var/lib/tomcat6/webapps/%s/DatabaseState.javaser
#this optional property tells the scheduler to feed databasefilepath to
# the getServletContext().getRealPath(...) function. default is true.
#applyServletPath=true
applyServletPath=false" > war/WEB-INF/classes/scheduler/view/web/server/scheduler.properties''' % directory)
        rsync(local_dir='war/*', \
              remote_dir='/var/lib/tomcat6/webapps/'+directory+'/', \
              exclude='.svn', \
              extra_opts='--omit-dir-times --no-perms')
    restart_tomcat()
def provision():
    """Install chef on the target machine and converge the 'app' recipe."""
    provision_src = os.path.join(os.path.dirname(env.real_fabfile), 'provision')
    cache_dir = provision_src + '/.local'
    # Download chef.deb into the local cache the first time only.
    if not os.path.exists(cache_dir + '/chef.deb'):
        local('mkdir -p ' + cache_dir)
        with lcd(cache_dir):
            local('curl -fL ' + env.chef_url + ' -o chef.deb')
    with hide('running'):
        sudo('mkdir -p ' + env.provision_dir)
        sudo('chown -R ' + env.user + ': ' + env.provision_dir)
        rsync(
            default_opts='-a',
            ssh_opts=env.ssh_opts,
            delete=True,
            exclude='nodes',
            local_dir=provision_src + '/',
            remote_dir=env.provision_dir + '/')
    # Install the chef client if it is missing.
    with cd(env.provision_dir + '/.local'):
        if run('dpkg -l chef', quiet=True).failed:
            sudo('dpkg -i chef.deb')
    # Converge the server with the hard-coded 'app' recipe.
    with cd(env.provision_dir):
        recipe = 'recipe[app]'
        sudo('chef-client -z -c client.rb -E %s -o %s' % (env.runtime, recipe))
def _rsync_upload(self, local_path, remote_path, **kwargs):
    """Upload *local_path* to *remote_path* via rsync in directory-sync mode."""
    from fabric.contrib.project import rsync_project as rsync
    import posixpath
    # Fill in any rsync options the caller did not override.
    for opt, default in self.rsync_defaults.items():
        kwargs.setdefault(opt, default)
    # A trailing separator on both paths makes rsync copy directory
    # *contents* rather than the directory itself.
    src = local_path.rstrip(os.sep) + os.sep
    dst = remote_path.rstrip(posixpath.sep) + posixpath.sep
    rsync(local_dir=src, remote_dir=dst, **kwargs)
def _sync_code(remote_code_root=CODE_ROOT) -> None:
    """Rsync the current working tree to *remote_code_root* on the host.

    Data is deliberately NOT copied: the VQA dataset being operated on is
    over 30 GiB, so trying to sync it on every run would be infeasible.
    """
    run(f'mkdir -p {remote_code_root}')
    # Skip VCS/IDE clutter and Python bytecode.
    ignore = ['.git', '.idea', '*__pycache__*', '*.pyc']
    rsync(remote_dir=remote_code_root, local_dir='.', exclude=ignore)
def deploy_assets():
    """Upload the wanted output/*.json assets to the live data directory.

    Maybe later upload to a hidden /data dir that's just symlinked.
    """
    remote_data = '/srv/www/eu4/map/app/current/data/'
    staging = '.deploy/'
    # Only these generated JSON files get published.
    names = [
        'achievements', 'countries', 'eu4map', 'provdata',
        'tradenodes', 'ui', 'units', 'data/_all',
    ]
    # Stage the wanted files in a temporary build dir, sync, then clean up.
    os.makedirs(staging, exist_ok=True)
    for name in names:
        shutil.copy(path('output', name + '.json'), staging)
    rsync(local_dir=staging, remote_dir=remote_data, exclude=[])
    shutil.rmtree(staging)
def _sync_data_and_code():
    """Sync the preprocessed data, the training script, and the model code."""
    # TODO(andrei): '--progress' flag for rsync or pipe through 'pv'.
    run('mkdir -p ~/deploy/data/preprocessing')
    # The trailing slash makes rsync copy the directory contents;
    # 'os.path.join' does no tilde expansion, which is what we want here.
    data_dir = os.path.join('data', 'preprocessing') + '/'
    remote_data_dir = os.path.join('~/deploy', data_dir)
    # Data must have been preprocessed in advance.
    rsync(local_dir=data_dir, remote_dir=remote_data_dir, exclude=['*.txt'])
    put(local_path='./train_model.py',
        remote_path=os.path.join('~/deploy', 'train_model.py'))
    # Finally, sync the model code itself.
    rsync(local_dir='model', remote_dir='deploy')
def sync(path, exclude=None, key=None):
    """Rsync *path* to the remote host, mirroring deletions.

    Args:
        path: Local directory to upload.
        exclude: Optional list of extra rsync exclude patterns. The previous
            implementation used a mutable default ([]) and then immediately
            overwrote it, silently ignoring anything the caller passed; the
            argument is now honored, with the standard ignores appended.
        key: Optional SSH identity file (currently unused — see the
            commented-out extra_opts below).
    """
    # No mutable default; combine caller patterns with the standard ignores.
    if exclude is None:
        exclude = []
    exclude = list(exclude) + [".git/*", "build/*", "*.pyc"]
    # extras = '-L -e "ssh -i %s"' % key  # re-enable to make use of `key`
    extras = ""
    result = rsync(path, delete=True, exclude=exclude, extra_opts=extras)
    if result:
        puts(result)
def deploy():
    """Update production with latest changes.

    Roughly: build, push, pull, install, rsync, restart, visit.
    """
    build()
    local('git push')
    with cd('~/narf.pl/links/depl'):
        run('git pull')
        run('source .environment && pip install -r ../env/reqs')
    # Upload the generated assets next to the app.
    for subdir in ('build', 'static'):
        rsync(local_dir=join(APP_DIR, subdir),
              remote_dir='~/narf.pl/links/app',
              delete=True)
    restart()
    visit()
def install():
    """Provision the remote host: directories, helper assets, and virtualenv."""
    # Ensure the base dir, the ~/virtual symlink and the virtual dir exist.
    if not exists(remote_base):
        run("mkdir -p {}".format(remote_base))
    if not exists("~/virtual"):
        run("ln -s /var/www/virtual/$USER/ ~/virtual")
    if not exists(remote_virtual):
        run("mkdir {}".format(remote_virtual))
    # Ship the .htaccess, the daemon launcher, and the package config file.
    put(get_asset(asset_htaccess), join(remote_virtual, ".htaccess"))
    put(get_asset(asset_daemonsh), join(remote_base, "daemon.sh"))
    put(get_asset(asset_configfile),
        join(remote_base, "config_{}.py".format(package_name)))
    with cd(remote_base):
        run("chmod +x daemon.sh")
    # Mirror the package source and upload the pinned requirements.
    rsync(local_dir=local_package_base, exclude=rsync_ignore,
          remote_dir=remote_base + "/", delete=True)
    put(join(local_base, "requirements.txt"), remote_base)
    with cd(remote_base):
        # First install only: create the Python 2.7 virtualenv with gunicorn.
        if not exists(remote_base + "/env"):
            run("virtualenv-2.7 env --distribute")
            run("env/bin/pip install -U gunicorn {}".format(gunicorn_requirements))
        run("env/bin/pip install -r ./requirements.txt")
    # Expose the static files through the web-visible virtual dir.
    run("cp -R {}/static {}".format(remote_package_base, remote_virtual))
def deploy():
    """Rsync www/share to the server, build the frontend, and relink it.

    Prints the rsync file list with remote deletions in red and
    transferred files in green, then verifies node and runs the npm build.
    """
    local_directory = os.path.join(
        os.path.dirname(env.real_fabfile), 'www/share'
    )
    with hide('running'):
        sudo('mkdir -p ' + env.deploy_dir)
        sudo('chown -R ' + env.user + ': ' + env.deploy_dir)
        rsync_output = rsync(
            default_opts='-av',
            ssh_opts=env.ssh_opts,
            delete=True,
            exclude=[
                'public/',
                'page/dist/',
                # BUG FIX: a missing comma previously concatenated
                # 'page/node_modules/' with 'page2/dist/' into one bogus
                # pattern, so neither path was actually excluded.
                'page/node_modules/',
                'page2/dist/',
                'page2/node_modules/',
            ],
            local_dir=local_directory + '/',
            remote_dir=env.deploy_dir + '/',
            capture=True)
        files = rsync_output.split('\n')
        # Drop rsync's trailing summary (blank line + "sent/total" lines).
        del files[-3:]
        # Keep only files; the emptiness guard avoids IndexError on x[-1]
        # for blank lines, and trailing '/' marks directories.
        files = [x for x in files if x and x[-1] != '/']
        for i, x in enumerate(files):
            # Was `x.find('deleting') is 0`, which relied on CPython's
            # small-int identity caching; startswith is the correct test.
            if i > 0 and x.startswith('deleting'):
                files[i] = '\033[31m' + x + '\033[0m'  # red: deleted remotely
            elif i > 0:
                files[i] = '\033[32m' + x + '\033[0m'  # green: transferred
        puts('\n'.join(files))
    # node version check
    node_version = run('node -v', quiet=True)
    # Raw string: '\.' in a non-raw literal triggers a DeprecationWarning.
    if not re.search(r'^v[0-9]+\.[0-9]+\.[0-9]+', node_version):
        abort('nodejs version parse failed')
    with cd(env.page_dir):
        run('npm install --no-save')
        run('npm run build')
    sudo('ln -nfs %s %s' % (env.page_dir + '/dist', env.public_dir))
def sync():
    """Create a world-writable /srv on the remote host and rsync into it."""
    sudo("mkdir -p /srv")
    sudo("chmod a+w /srv")
    # Skip VCS metadata, build output, node_modules, swap files and bytecode.
    ignored = ['.git', 'build', '*node_modules*', '*.swp', '*.pyc']
    rsync("/srv", exclude=ignored)