def deploy(upgrade=1):
    """Deploys to the given system.
    Use salt, chef, or puppet to configure the outside packages.

    Things required to be set up:
    - python
    - database driver
    - virtualenv
    - coffeescript
    - java
    - pip
    - database (postgres; postgres user)
    - created database & user
    - webserver (nginx; www-data user)
    - webserver config to proxypass to gunicorn (nginx)
    - memcached
    """
    upload_monit_conf()
    clean()
    with cd('/www/yacs/'):
        if not exists('virtualenv'):
            puts('Creating Virtual Environment...')
            sudo('virtualenv --distribute virtualenv', user=USER)
    puts('Uploading to remote...')
    with settings(warn_only=True):
        run('rm -rf tmp')
    run('mkdir tmp')
    upload_project(remote_dir='tmp')
    sudo('mv -f tmp/yacs /www/yacs/tmp')
    sudo('chown -R %s /www/yacs/tmp' % USER)
    sudo('chgrp -R %s /www/yacs/tmp' % GROUP)
    run('rm -rf tmp')
    with cd('/www/yacs/'):
        puts('Replacing remote codebase...')
        sudo('rm -rf django', user=USER)
        sudo('mv -f tmp django', user=USER)
    with cd('/www/yacs/django'):
        puts('Removing extra files...')
        with settings(warn_only=True):
            sudo('find . -name ".*" | xargs rm -r', user=USER)
            sudo('rm yacs.db', user=USER)
        puts('Installing dependencies...')
        prefix = '--upgrade'
        if not int(upgrade):
            prefix = ''
        sudo(PIP + ' install %s -r requirements/deployment.txt' % prefix, user=USER)
        sudo(PIP + ' install %s %s' % (prefix, ADDITIONAL_PACKAGES), user=USER)
        puts('Running migrations...')
        managepy('syncdb --noinput')
        managepy('migrate --noinput')
        puts('Gathering static files...')
        managepy('collectstatic --noinput')
        puts("Clearing caches...")
        sudo('service memcached restart')
        managepy('clear_cache')
        puts('Restarting gunicorn...')
        sudo('service monit restart')
        sudo('monit restart yacs')
    update_crontab()
    puts('Done!')

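# Usage sketch for the task above (assuming the standard Fabric 1.x CLI;
# the host name is hypothetical):
#
#   fab -H yacs.example.com deploy            # pip installs with --upgrade
#   fab -H yacs.example.com deploy:upgrade=0  # keep currently pinned versions
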
def deploy_ask():
    sudo('service tomcat stop')
    sudo('rm -rf /opt/tomcat/webapps/ask')
    upload_project(local_dir='./ttsd-ask-web/war/ask.war',
                   remote_dir='/opt/tomcat/webapps')
    sudo('service tomcat start')
    sudo('service nginx restart')

def _config_nginx():
    upload_project(NGINX_CONFIG)
    sudo('mv /etc/nginx/sites-available/default '
         '/etc/nginx/sites-available/default.orig')
    sudo('mv simple_nginx_config /etc/nginx/sites-available/default')
    sudo('/etc/init.d/nginx restart')

def push_project():
    """Push out new code to the server"""
    with settings(warn_only=True):
        with cd("/tmp/"):
            project.upload_project()
            run("rm -rf /home/%(user)s/%(project_name)s/django-carpool" % env)
            run("cp -rf apps/django-carpool /home/%(user)s/%(project_name)s/django-carpool" % env)

def deploy_point():
    sudo('service tomcat stop')
    sudo('rm -rf /opt/tomcat/webapps/ROOT')
    upload_project(local_dir='./ttsd-point-web/war/ROOT.war',
                   remote_dir='/opt/tomcat/webapps')
    sudo('service tomcat start')
    sudo('service nginx restart')

def copy_to_server():
    "Upload the app to a versioned path."
    # Ensure the deployment directory is there...
    with cd(env.path):
        run("mkdir -p deploys/%(version)s" % env)
        upload_project(local_dir="./target",
                       remote_dir="%(path)s/deploys/%(version)s" % env)

def upload(sync=True):
    "Uploads all pillars, modules, and states to the remote server."
    runner.action("Upload Salt Data")
    with runner.with_prefix(' ~> '):
        runner.state("Clear existing data")
        sudo('rm -rf {0}'.format(SALT_DIR))
        sudo('mkdir -p {0}'.format(SALT_DIR))
        for system in env.configs:
            runner.state("Upload configuration: " + system)
            for dirname in ('states', 'pillars'):
                src = os.path.join(CONFIG_DIR, system, dirname)
                path = os.path.join(SALT_DIR, system)
                dest = os.path.join(path, dirname)
                runner.state(" - {0}/{1}".format(system, dirname))
                upload_project(src)
                # remove dot files
                sudo('find {0} -name ".*" | xargs rm -rf'.format(dirname))
                # remove pyc files
                sudo('find {0} -name "*.pyc" | xargs rm -rf'.format(dirname))
                with settings(warn_only=True):
                    sudo('mkdir -p {0}'.format(path))
                sudo('mv {0} {1}'.format(dirname, dest))
        if sync:
            runner.state("Sync pillar data to minions")
            sudo("salt '*' saltutil.refresh_pillar")
            runner.state("Sync states and modules to minions")
            sudo("salt '*' saltutil.sync_all")

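# Hypothetical local layout assumed by upload() above: CONFIG_DIR holds one
# subdirectory per system, each with 'states' and 'pillars' trees, and
# env.configs names the systems to push, e.g.
#
#   CONFIG_DIR/
#       web/
#           states/
#           pillars/
#       db/
#           states/
#           pillars/
#
#   env.configs = ['web', 'db']
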
def install_shorewall():
    fabtools.require.deb.package('shorewall')
    upload_project('assets/shorewall/', '/etc/')
    with settings(hide('running'), shell_env()):
        sed('/etc/default/shorewall', 'startup=0', 'startup=1')
        run('shorewall start')

def upload():
    "Upload project"
    project = os.path.dirname(__file__)
    remote = env.path
    run('mkdir -p %s' % remote)
    with cd(remote):
        upload_project(project, remote)

def deploy_anxin():
    upload_project(local_dir='./ttsd-anxin-wrapper/war/ROOT.war',
                   remote_dir='/workspace/anxin/war')
    with cd('/workspace/anxin'):
        sudo('/usr/local/bin/docker-compose -f anxin.yml stop')
        sudo('/usr/local/bin/docker-compose -f anxin.yml rm -f')
        sudo('rm -rf ROOT')
        sudo('/usr/local/bin/docker-compose -f anxin.yml up -d')

def test_project_is_archived_locally(self):
    """The project should be archived locally before being uploaded."""
    # local() is called more than once so we need an extra next_call()
    # otherwise fudge compares the args to the last call to local()
    self.fake_local.with_args(arg.startswith("tar -czf")).next_call()

    # Exercise
    project.upload_project()

def deploy_static():
    mk_static_zip()
    upload_project(local_dir='./ttsd-frontend-manage/resources/prod/static_all.zip',
                   remote_dir='/workspace')
    with cd('/workspace'):
        sudo('rm -rf static/')
        sudo('unzip static_all.zip -d static')
        sudo('service nginx restart')

def deploy_console():
    upload_project(local_dir='./ttsd-console/war/ROOT.war',
                   remote_dir='/workspace/console')
    upload_project(local_dir='./ttsd-activity-console/war/ROOT.war',
                   remote_dir='/workspace/activity-console')
    with cd('/workspace'):
        sudo('/usr/local/bin/docker-compose -f console.yml -p ttsd stop')
        sudo('/usr/local/bin/docker-compose -f console.yml -p ttsd rm -f')
        sudo('/usr/local/bin/docker-compose -f console.yml -p ttsd up -d')

def copyfiles():
    """ rebuild the whole project directory on remotes """
    # copy files
    from settings import CRPC_ROOT
    with settings(warn_only=True):
        local('find {0} -name "*.pyc" -delete'.format(CRPC_ROOT))
        run("rm -rf /srv/crpc")
        run("mkdir -p /srv/crpc")
        #put(CRPC_ROOT+"/*", "/srv/crpc/")
        upload_project(CRPC_ROOT + '/', '/srv/')

def deploy_ask_rest():
    upload_project(local_dir='./ttsd-ask-rest/build/distributions/ttsd-ask-rest.zip',
                   remote_dir='/workspace/rest-service')
    with cd('/workspace/rest-service'):
        sudo('/usr/local/bin/docker-compose -f ask-rest.yml stop')
        sudo('/usr/local/bin/docker-compose -f ask-rest.yml rm -f')
        sudo('rm -rf ttsd-ask-rest')
        sudo('unzip ttsd-ask-rest.zip')
        sudo('/usr/local/bin/docker-compose -f ask-rest.yml up -d')

def configure():
    "configure the Hackasaurus server"
    secrets.build_secrets_manifest(env['host'])
    run('rm -rf /root/deployment')
    run('mkdir /root/deployment')
    with cd('/root/deployment'):
        upload_project(path('..', 'manifests'))
        upload_project(path('..', 'modules'))
        put(path('run-on-server', 'bootstrap.py'), '.')
        run('python bootstrap.py')

def install(instance_id):
    instance_dir = env['sarge_home'] / instance_id
    upload_project(str(project_dir / 'tests'), str(instance_dir))
    put(str(project_dir / 'convert.py'), str(instance_dir))
    put(str(project_dir / 'run_tests.py'), str(instance_dir))
    put(StringIO(CONVERT_SCRIPT.format(instance_dir=instance_dir, **env)),
        str(env['sarge_home'] / 'bin' / 'convert'),
        mode=0755)
    put(StringIO(RUN_TESTS_SCRIPT.format(instance_dir=instance_dir, **env)),
        str(env['sarge_home'] / 'bin' / 'run_tests'),
        mode=0755)

def testapp():
    install_protobuf()
    with cd('~/projects'):
        upload_project('./receiver', '/home/sysadmin/projects')
    ubuntu.upstart("receiver", "app.js",
                   user=env.remote_admin,
                   home="/home/%s" % env.remote_admin,
                   location_dir='/home/sysadmin/projects/receiver/')
    sudo("stop receiver")
    sudo("start receiver")

def upload(self, schema, workcopy):
    tmpdir = os.path.join(myenv.tmp, os.path.basename(tempfile.mktemp()))
    run("mkdir '%s'" % tmpdir)
    try:
        with cd(tmpdir):
            # works around a bug in project.upload_project; it has been
            # fixed in the latest release
            project.upload_project(workcopy, tmpdir)
        schema.push_to_release(os.path.join(tmpdir, os.path.basename(workcopy)))
    finally:
        run("rm -rf '%s'" % tmpdir)

def test_path_to_local_project_no_separator(self):
    """Local folder can have no path separator (in current directory)."""
    project_path = "testpath"
    # local() is called more than once so we need an extra next_call()
    # otherwise fudge compares the args to the last call to local()
    self.fake_local.with_args(arg.endswith("-C . testpath")).next_call()

    # Exercise
    project.upload_project(local_dir=project_path)

def upload(self, schema, workcopy):
    tmpdir = os.path.join(myenv.tmp, os.path.basename(tempfile.mktemp()))
    run("mkdir '%s'" % tmpdir)
    try:
        with cd(tmpdir):
            project.upload_project(workcopy, tmpdir)
        schema.overwrite_to_release(os.path.join(tmpdir, os.path.basename(workcopy)))
    finally:
        run("rm -rf '%s'" % tmpdir)

def test_path_to_local_project_can_be_specified(self):
    """It should be possible to specify which local folder to upload."""
    project_path = "path/to/my/project"
    # local() is called more than once so we need an extra next_call()
    # otherwise fudge compares the args to the last call to local()
    self.fake_local.with_args(
        arg.endswith("-C path/to/my project")).next_call()

    # Exercise
    project.upload_project(local_dir=project_path)

def test_current_directory_is_uploaded_by_default(self):
    """By default the project uploaded is the current working directory."""
    cwd_path, cwd_name = os.path.split(os.getcwd())
    # local() is called more than once so we need an extra next_call()
    # otherwise fudge compares the args to the last call to local()
    self.fake_local.with_args(
        arg.endswith("-C %s %s" % (cwd_path, cwd_name))).next_call()

    # Exercise
    project.upload_project()

def install():
    """Install the pipeline on the specified cluster"""
    logger.debug("Installing pipeline...")
    local_root = os.environ['ROOT']
    remote_root = app_config['root']
    if local_exec:
        if abspath(local_root) == abspath(remote_root):
            logger.error("Source and destination folder are the same")
            exit(1)
        if exists(remote_root):
            if confirm("Existing data will be deleted. Do you want to proceed anyway?", default=False):
                rmtree(remote_root)
            else:
                logger.error("Pipeline destination folder already exists")
                exit(2)
        copytree(local_root, remote_root)
        local(remote_root + '/utils/install.sh')
    else:
        if app_config["use_sudo"]:
            run_fn = sudo
        else:
            run_fn = run
        if not fab_exists(remote_root):
            logger.debug("Building remote directory...")
            run_fn("mkdir -p " + remote_root)
        else:
            if not confirm("Existing data will be deleted. Do you want to proceed anyway?", default=False):
                logger.error("Pipeline destination folder already exists")
                exit(2)
        logger.debug("Uploading project...")
        upload_project(
            local_dir=local_root,
            remote_dir=remote_root,
            use_sudo=app_config["use_sudo"]
        )
        if run_fn(remote_root + "/utils/auth.sh").failed:
            logger.error("An error occurred while setting permissions for the pipeline")
            exit(3)
        if run(remote_root + "/utils/install.sh").failed:
            logger.error("An error occurred in the install script")
            exit(4)
    logger.info("Pipeline successfully installed")

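# A sketch of the app_config shape implied by install() above; the keys are
# the ones the function reads, the values are illustrative assumptions:
#
#   app_config = {
#       'root': '/opt/pipeline',  # destination install root
#       'use_sudo': True,         # run remote commands and the upload via sudo
#   }
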
def test_path_to_local_project_can_end_in_separator(self):
    """A local path ending in a separator should be handled correctly."""
    project_path = "path/to/my"
    base = "project"
    # local() is called more than once so we need an extra next_call()
    # otherwise fudge compares the args to the last call to local()
    self.fake_local.with_args(
        arg.endswith("-C %s %s" % (project_path, base))).next_call()

    # Exercise
    project.upload_project(local_dir="%s/%s/" % (project_path, base))

def test_default_remote_folder_is_home(self):
    """Project is uploaded to remote home by default."""
    local_dir = "folder"
    # local() is called more than once so we need an extra next_call()
    # otherwise fudge compares the args to the last call to local()
    self.fake_put.with_args("%s/folder.tar.gz" % self.fake_tmp,
                            "folder.tar.gz", use_sudo=False).next_call()

    # Exercise
    project.upload_project(local_dir=local_dir)

def chef():
    local("berks install --path=vendor/cookbooks")
    chef_binary = "/var/lib/gems/1.8/bin/chef-solo"
    if not exists(chef_binary):
        bootstrap()
    if not exists('/tmp/chef'):
        run('mkdir /tmp/chef')
    if not exists('/tmp/roles'):
        run('mkdir /tmp/roles')
    run('rm -rf /tmp/chef/*')
    run('rm -rf /tmp/roles/*')
    upload_project(local_dir="vendor/cookbooks", remote_dir="/tmp/chef")
    put('roles/monitor.json', '/tmp/roles/')
    put('monitor.json', '/tmp/chef/cookbooks')
    put('solo.rb', '/tmp/chef/cookbooks')
    run('cd /tmp/chef/cookbooks && sudo /var/lib/gems/1.8/bin/chef-solo -c solo.rb -j monitor.json')

def uploadBundle(self, release_name):
    releaseZip = '%s.zip' % release_name
    print yellow('\n>> Uploading newest release to remote server')
    with hideOutput():
        upload_project(local_dir=join(arke.Core.paths['base'], releaseZip),
                       remote_dir=arke.Core.paths['releases'],
                       use_sudo=True)
        with cd(arke.Core.paths['releases']):
            sudo('unzip %s -d ./%s; rm -rf %s' % (releaseZip, release_name, releaseZip))
    lbash('rm -f "%s"' % releaseZip)
    print green('>> Done uploading newest release')

def full_deploy():
    update_apt()
    install_python3_prereqs()
    install_python3()
    fetch_get_pip_script()
    install_pip3()
    install_pip2()
    render_templates()
    upload_project(remote_dir=config.remote_dir)
    install_requirements()
    install_supervisor()
    update_boot_file()
    reboot()
    update_iptables()

def test_path_to_remote_folder_can_be_specified(self):
    """It should be possible to specify which remote folder to upload to."""
    local_dir = "folder"
    remote_path = "path/to/remote/folder"
    # local() is called more than once so we need an extra next_call()
    # otherwise fudge compares the args to the last call to local()
    self.fake_put.with_args("%s/folder.tar.gz" % self.fake_tmp,
                            "%s/folder.tar.gz" % remote_path,
                            use_sudo=False).next_call()

    # Exercise
    project.upload_project(local_dir=local_dir, remote_dir=remote_path)

def move_project_to_www(local_path, remote_path):
    local_path = os.path.abspath(local_path)
    sudo('mkdir /var/www || true')
    depl_tmp = '/var/www/tmp_depl'
    sudo('mkdir %s || true' % depl_tmp)
    upload_project(local_path, depl_tmp, use_sudo=True)
    _, local_name = os.path.split(local_path)
    # delete all the files in the target directory that are also in the source
    # directory and move the source.
    sudo('mkdir %s || true' % remote_path)
    sudo('ls -A {from_p} | xargs -I [] sh -c '
         '"rm -rf {to_p}/[] || true; mv {from_p}/[] {to_p}"'.format(
             from_p=os.path.join(depl_tmp, local_name), to_p=remote_path))
    sudo('rm -rf %s' % depl_tmp)
    sudo('chown -R www-data:www-data ' + remote_path)

def _copy_directory(self, resource, _from, _to, use_sudo=False):
    executor = lambda transport: fabric_project.upload_project(
        remote_dir=_to, local_dir=_from, use_sudo=use_sudo
    )
    return executor

def wrp(transport):
    self._ensure_remote_dir_exists(resource, _from, _to, use_sudo)
    return fabric_project.upload_project(
        remote_dir=_to, local_dir=_from, use_sudo=use_sudo
    )

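# Both helpers above use the same deferred-execution pattern: the upload is
# wrapped in a callable taking a `transport` argument (unused by
# upload_project itself) so a caller can schedule it later. A hypothetical
# driver, assuming the surrounding class:
#
#   copy = self._copy_directory(resource, '/local/app', '/remote/app')
#   copy(transport=None)
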
def upload():
    project.upload_project(env.export_srv, env.server_home)
    run('mv "%s" "%s"' % (env.server_home + '/' + os.path.basename(env.export_srv),
                          env.server_path_dist))
    key_path_def = os.path.join(env.server_dir, 'server.key')
    crt_path_def = os.path.join(env.server_dir, 'server.crt')
    key_path = os.path.join(env.config_dir, env.host_string, 'server.key')
    crt_path = os.path.join(env.config_dir, env.host_string, 'server.crt')
    if os.path.isfile(key_path):
        # upload host specific key
        project.upload_project(key_path, env.server_path_dist)
    elif os.path.isfile(key_path_def):
        # upload default key
        project.upload_project(key_path_def, env.server_path_dist)
    else:
        raise Exception('ERROR: Server private key is not found')
    if os.path.isfile(crt_path):
        # upload host specific certificate
        project.upload_project(crt_path, env.server_path_dist)
    elif os.path.isfile(crt_path_def):
        # upload default certificate
        project.upload_project(crt_path_def, env.server_path_dist)
    else:
        raise Exception('ERROR: Server certificate is not found')
    # fix permissions
    run('find "%s" -type f -print0 | xargs -0 chmod %d' % (env.server_path_dist, env.perm_file))
    run('find "%s" -type d -print0 | xargs -0 chmod %d' % (env.server_path_dist, env.perm_dir))
    run('chmod +x %s/server.py' % env.server_path_dist)

def staging():
    # path to the directory on the server where your vhost is set up
    path = "/var/www/clients/{FILLINTHEBLANK}"
    domain = ""
    # name of the application process
    process = "staging"
    version_number = prompt("enter version number")
    print(green("Moving created HTML"))
    local("mv converted-html %s" % version_number)
    print(red("Beginning Deploy:"))
    print(green("Checking for %s directory" % version_number))
    run("rm -rf %s/%s/" % (path, version_number))
    upload_project("./%s/" % version_number, path)
    local("rm -rf %s" % version_number)
    print(red("DONE!"))

def update_state_and_pillar_files():
    """Updates state and pillar files by uploading them to ``/srv/salt_roots``.

    First it checks whether the folder is present; if so, it is deleted.
    Then the ``salt_roots`` folder is uploaded to ``/srv`` and its owner is
    changed to root.
    """
    if exists('/srv/salt_roots'):
        sudo('rm -rf /srv/salt_roots')
    upload_project('salt/salt_roots', '/srv', use_sudo=True)
    sudo('chown root:root -R /srv/salt_roots')
    sudo('chmod 600 -R /srv/salt_roots')

def copy_salt_roots_and_pillars():
    """Copy salt files to remote host."""
    # exists()/sudo() operate on the remote host, so the destination
    # directories are the ones to check for and create.
    if not exists(REMOTE_SALT_PATH_ROOTS):
        sudo("mkdir -p %s" % REMOTE_SALT_PATH_ROOTS)
    if not exists(REMOTE_SALT_PATH_PILLAR):
        sudo("mkdir -p %s" % REMOTE_SALT_PATH_PILLAR)
    upload_project(local_dir=LOCAL_SALT_PATH_ROOTS,
                   remote_dir=REMOTE_SALT_PATH_ROOTS, use_sudo=True)
    upload_project(local_dir=LOCAL_SALT_PATH_PILLAR,
                   remote_dir=REMOTE_SALT_PATH_PILLAR, use_sudo=True)
    put(LOCAL_MINION_CONFIG_PATH, REMOTE_MINION_CONFIG_PATH, use_sudo=True)
    sudo("service salt-minion restart")

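# Hypothetical values for the path constants referenced above; the actual
# salt tree layout in the repository may differ:
#
#   LOCAL_SALT_PATH_ROOTS = 'salt/roots'
#   LOCAL_SALT_PATH_PILLAR = 'salt/pillar'
#   REMOTE_SALT_PATH_ROOTS = '/srv/salt'
#   REMOTE_SALT_PATH_PILLAR = '/srv/pillar'
#   LOCAL_MINION_CONFIG_PATH = 'salt/minion'
#   REMOTE_MINION_CONFIG_PATH = '/etc/salt/minion'
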
def setup():
    packages = BASIC_PACKAGES
    #run('apt-get install -y %s' % ' '.join(packages))
    package_install(packages)
    package_ensure(packages)
    # start elasticsearch
    with cd(ROOT):
        run('find . -name "*.pyc" | xargs rm -rf')
    with settings(warn_only=True):
        run("rm -rf {0}".format(ROOT))
        run("mkdir -p {0}".format(ROOT))
    upload_project(ROOT, '/opt/')
    with cd(os.path.join(ROOT, 'scripts')):
        run('. setupenv.sh')

def createSymbolicLinks(self, release_name, deployMode):
    curReleaseDir = join(arke.Core.paths['releases'], release_name)
    print yellow('\n>> Creating links between shared files')
    for arr in arke.Core.options['project']['fileStructure']['shared']:
        if len(arr) == 1:
            arr = [arr[0], arr[0]]
        nodeOriginFullPath = join(curReleaseDir, arr[0])
        nodeTargetFullPath = join(arke.Core.paths['shared'], arr[1])
        print cyan('>>> Linking: current/%s -> shared/%s' % tuple(arr))
        with hideOutput():
            if is_link(nodeOriginFullPath):
                sudo('unlink %s' % (nodeOriginFullPath))
            elif exists(nodeOriginFullPath):
                sudo('rm -rf %s' % (nodeOriginFullPath))
            sudo('ln -sfv %s %s' % (nodeTargetFullPath, nodeOriginFullPath))
    print green('>> Done linking shared files and folders')
    if deployMode != 'bundle' and 'toUpload' in arke.Core.options['project']['fileStructure']:
        print yellow('\n>> Sending all files/folders listed on "toUpload"')
        for arr in arke.Core.options['project']['fileStructure']['toUpload']:
            if len(arr) == 1:
                arr = [arr[0], arr[0]]
            nodeOriginFullPath = join(arke.Core.paths['base'], arr[0])
            nodeTargetFullPath = join(curReleaseDir, arr[1])
            print cyan('>>> Uploading: %s -> %s' % tuple(arr))
            with hideOutput():
                upload_project(local_dir=nodeOriginFullPath,
                               remote_dir=nodeTargetFullPath,
                               use_sudo=True)
        print green('>> Done uploading files and folders')
    self.fixPermissions()

def deploy_prod():
    prod()
    git_path = "/home/ec2-user/photo/photo.git"
    process = "deploying"
    print(red("Beginning Deploy:"))
    with cd(git_path):
        print(red('pushing master to production'))
        local('git push production master')
        sudo('git --work-tree=/mnt/current checkout -f master')
        run('source /opt/apps/photo-env/bin/activate')
        sudo('cd /mnt/current && pip install -r requirements.txt')
        sudo('python /mnt/current/manage.py migrate')
        print(green('master pushed to production'))
    # local('python /home/ryan/Dev/ryanfisher/photo/manage.py collectstatic --noinput --settings=project.settings.production')
    project.upload_project(
        remote_dir='/mnt/current',
        local_dir='/home/ryan/Dev/ryanfisher/photo/static_root',
        use_sudo=True
    )
    print(red('updating static files'))
    sudo('restart uwsgi')

def _deploy(self):
    run('apt-get update')
    run('apt-get -y install python python-pip build-essential '
        'libssl-dev libffi-dev python-dev gcc')
    run('pip install twisted tabulate structlog')
    if exists(self.remote['basePath']):
        run('rm -r ' + self.remote['basePath'])
    run('mkdir -p ' + self.remote['modulePath'])

    from os import path
    from fabric.contrib.project import upload_project
    localPath = path.join(self.local['modulesPath'], self.conf['coreModule'])
    remotePath = path.join(self.remote['basePath'], '..')
    upload_project(local_dir=localPath, remote_dir=remotePath)
    localPath = self.local['apiPath']
    remotePath = self.remote['basePath']
    upload_project(local_dir=localPath, remote_dir=remotePath)
    # mode must be passed by keyword: the third positional argument to
    # put() is use_sudo, not mode
    put(self.local['configPath'], self.remote['configPath'], mode=0500)

def deploy_sign_in():
    for i in ('1', '2'):
        local("echo sign in start...")
        folder_name = 'signin_{0}'.format(i)
        local('echo sign in start...' + folder_name)
        try:
            local("echo sign in upload")
            upload_project(local_dir='./ttsd-user-rest-service/{0}.zip'.format(folder_name),
                           remote_dir='/workspace')
            logging.info("sign in upload done")
        except Exception as e:
            local("echo " + e.message)
            raise e
        with cd('/workspace'):
            sudo('rm -rf {0}'.format(folder_name))
            sudo('unzip {0}.zip -d {0}'.format(folder_name))
        with cd('/workspace/{0}'.format(folder_name)):
            sudo('/usr/local/bin/docker-compose -f prod.yml -p ttsd stop')
            sudo('/usr/local/bin/docker-compose -f prod.yml -p ttsd rm -f')
            sudo('/usr/local/bin/docker-compose -f prod.yml -p ttsd up -d')
    sudo('service nginx restart')

def upload():
    run('mkdir -p ~/PycharmProjects')
    upload_project('../{{cookiecutter.project_name}}', '~/PycharmProjects')

def test_temp_folder_is_used(self):
    """A unique temp folder is used for creating the archive to upload."""
    # Exercise
    project.upload_project()

def deploy(upgrade=1):
    """Deploys to the given system.
    Use salt, chef, or puppet to configure the outside packages.

    Things required to be set up:
    - python
    - database driver
    - virtualenv
    - coffeescript
    - java
    - pip
    - database (postgres; postgres user)
    - created database & user
    - webserver (nginx; www-data user)
    - webserver config to proxypass to gunicorn (nginx)
    - memcached
    """
    upload_monit_conf()
    clean()
    with cd('/www/yacs/'):
        if not exists('virtualenv'):
            puts('Creating Virtual Environment...')
            sudo('virtualenv --distribute virtualenv', user=USER)
    puts('Uploading to remote...')
    with settings(warn_only=True):
        run('rm -rf tmp')
    run('mkdir tmp')
    upload_project(remote_dir='tmp')
    sudo('mv -f tmp/yacs /www/yacs/tmp')
    sudo('chown -R %s /www/yacs/tmp' % USER)
    sudo('chgrp -R %s /www/yacs/tmp' % GROUP)
    run('rm -rf tmp')
    with cd('/www/yacs/'):
        puts('Replacing remote codebase...')
        sudo('rm -rf django', user=USER)
        sudo('mv -f tmp django', user=USER)
    with cd('/www/yacs/django'):
        puts('Removing extra files...')
        with settings(warn_only=True):
            sudo('find . -name ".*" | xargs rm -r', user=USER)
            sudo('rm yacs.db', user=USER)
        puts('Installing dependencies...')
        pip_prefix = '--upgrade'
        if not int(upgrade):
            pip_prefix = ''
        sudo(PIP + ' install %s -r requirements.txt' % pip_prefix, user=USER)
        envs = remote_vars('YACS_ENV', 'YACS_SECRET_KEY', 'YACS_DATABASE_URL')
        puts('Running migrations...')
        managepy('syncdb --noinput', envs)
        managepy('migrate --noinput', envs)
        puts('Gathering static files...')
        managepy('collectstatic --noinput', envs)
        puts("Clearing caches...")
        sudo('service memcached restart')
        managepy('clear_cache', envs)
        puts('Restarting gunicorn...')
        sudo('service monit restart')
        sudo('monit restart yacs')
    update_crontab()
    puts('Done!')

def upload_dir(local_dir, remote_dir):
    '''Uploads a local directory to the remote path.'''
    project.upload_project(local_dir, remote_dir)

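# Example use of the wrapper above; paths are illustrative. Note that
# upload_project() archives local_dir with tar and unpacks it *inside*
# remote_dir (see the tests earlier in this file), so './build' ends up
# at '/srv/app/build':
#
#   upload_dir('./build', '/srv/app')
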
def nginx_ssl_setup():
    upload_project(os.path.join(env.local_dir, 'docker-services.yml'),
                   '/srv/', use_sudo=True)
    run('docker-compose -f /srv/docker-services.yml up -d nginx-proxy letsencrypt-plugin')