def update_symbolic_links(build_number):
    """Repoint the 'config' and 'deploy' symlinks at the given build's dist tree."""
    with cd(dist_base_dir):
        for link_name in ('config', 'deploy'):
            # Drop a stale symlink first so the ln -s below cannot collide.
            if is_link(link_name, verbose=True):
                run('unlink %s' % link_name)
            run('ln -s ./dist/%s/%s %s' % (build_number, link_name, link_name))
def upload(self, release_id):
    """Upload the configured paths for *release_id*, seeding new remote
    destinations from the currently-active release before rsyncing."""
    from fabric.contrib import files
    import posixpath

    storage_root = self._abs_path(self.upload_storage_path)
    self._ensure_path_exists(storage_root)
    release_root = self._get_upload_release_path(release_id)
    self._ensure_path_exists(release_root)
    current_link = self._path_join(storage_root, self.upload_storage_current_link)

    for local_path, remote_path in self.upload_paths:
        # Refuse to clobber a real file/directory that is not managed via symlink.
        if files.exists(remote_path) and not files.is_link(remote_path):
            raise RuntimeError(
                'Remote path already exists, but is no symlink (%s)' % remote_path)
        dest_path = self._get_remote_upload_path(release_id, remote_path)
        if self._exists(current_link) and not self._exists(dest_path):
            pathname = self._get_remote_upload_pathname(release_id, remote_path)
            active_path = self._path_join(current_link, pathname)
            # Normalise to exactly one trailing separator so cp copies contents.
            src_arg = active_path.rstrip(posixpath.sep) + posixpath.sep
            dst_arg = dest_path.rstrip(posixpath.sep) + posixpath.sep
            self._run('{cp_bin} {cp_args} {from_path} {to_path}'.format(
                cp_bin=self.cp_bin(),
                cp_args=self.cp_args,
                from_path=shell_quote(src_arg),
                to_path=shell_quote(dst_arg),
            ))
        self._rsync_upload(local_path, dest_path)
# git_pull(domain, env='test'): deploy latest origin/master into /srv/www/<domain>.
# NOTE(review): this function has been flattened onto a single line; the original
# indentation (which statements belong to the mounted vs. unmounted branch, and
# the scope of the final local.xml re-link) cannot be recovered with certainty,
# so the code is left byte-identical.
# Observable behaviour: when the /tmp/<domain>.mounted marker exists, it unmounts
# the NFS media share, hard-resets the git checkout, fixes ownership/permissions,
# stashes media in /tmp, remounts the NFS export from 192.168.150.78, restores
# media and recreates the marker; otherwise it just fetches/resets and repoints
# app/etc/local.xml at local.xml.<env>.
def git_pull(domain,env='test'): directory = domain if exists('/tmp/{0}.mounted'.format(domain), use_sudo=True): sudo('umount -l /srv/www/{0}/media'.format(domain)) run('rm /tmp/{0}.mounted'.format(domain)) with cd("/srv/www/{0}".format(domain)): run("git fetch --all",warn_only=True) run("git reset --hard origin/master",warn_only=True) sudo('chown -R www-data:www-data /srv/www/{0}'.format(domain)) sudo('chmod -R g+rw /srv/www/{0}'.format(domain)) run('cp -R /srv/www/{0}/media /tmp/{0}'.format(directory)) cp_media = '/tmp/{0}'.format(directory) #mount nfs share on lb1 and update fstab sudo('sudo mount -t nfs -o proto=tcp,port=2049 192.168.150.78:/export/{0}/media /srv/www/{0}/media'.format(directory)) run('cp -R {0}/* /srv/www/{1}/media/. '.format(cp_media,directory)) run('rm -rf {0}'.format(cp_media)) run('touch /tmp/{0}.mounted'.format(domain)) else: with cd("/srv/www/{0}".format(domain)): #run("git pull",warn_only=True) run("git fetch --all",warn_only=True) run("git reset --hard origin/master",warn_only=True) if is_link('/srv/www/{0}/app/etc/local.xml'.format(domain)): run('rm /srv/www/{0}/app/etc/local.xml'.format(domain)) run('ln -s /srv/www/{1}/app/etc/local.xml.{0} /srv/www/{1}/app/etc/local.xml'.format(env,directory))
def create_config_symlink(domain, directory='', env='dev'):
    """Point app/etc/local.xml at the environment-specific config file.

    Falls back to *domain* as the directory name when none is given.
    """
    if directory == '':
        directory = domain
    config_path = '/srv/www/{0}/app/etc/local.xml'.format(directory)
    # Remove a pre-existing symlink so ln -s does not fail.
    if is_link(config_path):
        run('rm {0}'.format(config_path))
    run('ln -s {0}.{1} {0}'.format(config_path, env))
# NOTE(review): flattened onto one line; exact nesting of the trailing
# service-start/print statements cannot be recovered with certainty, so the
# code is left byte-identical. On each engine host it creates the base and
# log directories, updates the instance conf, symlinks /usr/bin/kraken into
# the instance dir (via idempotent_symlink), enables the service under
# systemd or SysV, starts it if not already running, and pgreps to report it.
def create_eng_instance(instance): """ Create a new kraken instance (idempotent) * Install requirements * Deploy the binary, the templatized ini configuration in a dedicated directory with rights to www-data and the logdir * Deploy initscript and add it to startup * Start the service """ instance = get_real_instance(instance) for host in instance.kraken_engines: with settings(host_string=host): # base_conf require.files.directory(instance.kraken_basedir, owner=env.KRAKEN_USER, group=env.KRAKEN_USER, use_sudo=True) # logs require.files.directory(env.kraken_log_basedir, owner=env.KRAKEN_USER, group=env.KRAKEN_USER, use_sudo=True) update_eng_instance_conf(instance, host) # kraken.ini, pid and binary symlink kraken_bin = "{}/{}/kraken".format(env.kraken_basedir, instance.name) if not is_link(kraken_bin): idempotent_symlink("/usr/bin/kraken", kraken_bin, use_sudo=True) sudo('chown -h {user} {bin}'.format(user=env.KRAKEN_USER, bin=kraken_bin)) kraken = "kraken_{}".format(instance.name) if not service.is_running(kraken): # TODO test this on systemd machines if env.use_systemd: sudo("systemctl enable kraken_{}.service".format( instance.name)) else: sudo("update-rc.d kraken_{} defaults".format( instance.name)) print( blue( "INFO: kraken {instance} instance is starting on {server}, " "waiting 5 seconds, we will check if processus is running" .format(instance=instance.name, server=get_host_addr(env.host_string)))) service.start(kraken) run("sleep 5") # we wait a bit for the kraken to pop with settings(warn_only=True): run("pgrep --list-name --full /srv/kraken/{}/kraken".format( instance.name)) print( blue("INFO: kraken {instance} instance is running on {server}". format(instance=instance.name, server=get_host_addr(host))))
# NOTE(review): flattened onto one line; exact nesting of the trailing
# statements cannot be recovered with certainty, so the code is left
# byte-identical. Variant that uses files.symlink (not idempotent_symlink),
# calls update_eng_instance_conf without the host argument, registers the
# SysV init script only when env.use_systemd is false, then starts the
# service unconditionally and pgreps by instance name.
def create_eng_instance(instance): """Create a new kraken instance * Install requirements (idem potem) * Deploy the binary, the templatized ini configuration in a dedicated directory with rights to www-data and the logdir * Deploy initscript and add it to startup * Start the service """ instance = get_real_instance(instance) for host in instance.kraken_engines: with settings(host_string=host): # base_conf require.files.directory(instance.kraken_basedir, owner=env.KRAKEN_USER, group=env.KRAKEN_USER, use_sudo=True) # logs require.files.directory(env.kraken_log_basedir, owner=env.KRAKEN_USER, group=env.KRAKEN_USER, use_sudo=True) update_eng_instance_conf(instance) # kraken.ini, pid and binary symlink kraken_bin = "{}/{}/kraken".format(env.kraken_basedir, instance.name) if not is_link(kraken_bin): files.symlink("/usr/bin/kraken", kraken_bin, use_sudo=True) sudo('chown -h {user} {bin}'.format(user=env.KRAKEN_USER, bin=kraken_bin)) #run("chmod 755 /etc/init.d/kraken_{}".format(instance)) # TODO refactor this and test it on systemd and non-systemd machines if not env.use_systemd: sudo("update-rc.d kraken_{} defaults".format(instance.name)) print( blue( "INFO: Kraken {instance} instance is starting on {server}, " "waiting 5 seconds, we will check if processus is running". format(instance=instance.name, server=get_host_addr(env.host_string)))) service.start("kraken_{}".format(instance.name)) run("sleep 5") # we wait a bit for the kraken to pop # test it ! # execute(test_kraken, get_host_addr(env.host_string), instance, fail_if_error=False) with settings(warn_only=True): run("pgrep --list-name --full {}".format(instance.name)) print( blue("INFO: kraken {instance} instance is running on {server}". format(instance=instance.name, server=get_host_addr(env.host_string))))
# cp_site(copy, paste, env): clone one site directory under /srv/www to a new
# name and wire up its environment config symlink and permissions.
# NOTE(review): flattened onto one line; it is unclear whether the symlink /
# chown / chmod statements are nested inside the `if not exists(...)` block or
# always executed — code left byte-identical.
def cp_site(copy='',paste='',env='dev'): if not exists('/srv/www/{0}'.format(paste)): run('cp -R /srv/www/{0} /srv/www/{1}'.format(copy,paste)) if is_link('/srv/www/{0}/app/etc/local.xml'.format(paste)): run('rm /srv/www/{0}/app/etc/local.xml'.format(paste)) run('ln -s /srv/www/{1}/app/etc/local.xml.{0} /srv/www/{1}/app/etc/local.xml'.format(env,paste)) sudo('chown -R www-data:www-data /srv/www/{0}'.format(paste)) sudo('chmod -R g+rw /srv/www/{0}'.format(paste))
def _link(links):
    """Ensure each configured symlink exists, pointing into CODE_BASE.

    *links* maps a base directory to {link_name: target_relative_to_CODE_BASE}.
    Existing symlinks are left untouched; anything else at the link path is
    removed and replaced.
    """
    for base, sublinks in links.items():
        for link_name, rel_target in sublinks.items():
            link_path = '{}/{}'.format(base, link_name)
            target_path = '{}/{}'.format(CODE_BASE, rel_target)
            if files.is_link(link_path):
                # Already a symlink — assume it is correct and skip it.
                continue
            cmd = (
                'sudo rm -rf {0} && '
                'cd $(dirname {0}) && '
                'sudo ln -s {1} $(basename {0})'
            ).format(link_path, target_path)
            run(cmd)
def _link(links):
    """Create any missing symlinks described by *links*.

    *links* maps a base directory to {link_name: target_relative_to_CODE_BASE}.
    A path that is already a symlink is skipped; otherwise it is removed and
    re-created pointing at the CODE_BASE target.
    """
    for base, sublinks in links.items():
        for name, target_rel in sublinks.items():
            src = '{0}/{1}'.format(base, name)
            dst = '{0}/{1}'.format(CODE_BASE, target_rel)
            if files.is_link(src):
                continue
            steps = [
                'sudo rm -rf {0}'.format(src),
                'cd $(dirname {0})'.format(src),
                'sudo ln -s {0} $(basename {1})'.format(dst, src),
            ]
            run(' && '.join(steps))
def setup_configuration_files():
    """Upload templated config files, enable the nginx site and mark the
    deploy scripts executable."""
    for remotepath, localpath, context in FILES:
        # Context entries may be lazy factories; resolve them at upload time.
        if callable(context):
            context = context()
        files.upload_template(localpath, remotepath, context,
                              use_sudo=True, backup=False)
        sudo("chown root:root {}".format(remotepath))
    enabled_site = "/etc/nginx/sites-enabled/{name}".format(name=PROJECT_NAME)
    if not files.is_link(enabled_site):
        sudo("ln -s /etc/nginx/sites-available/{name} "
             "/etc/nginx/sites-enabled/{name}".format(name=PROJECT_NAME))
    for script in ("kap10-deploy.sh", "kap10"):
        sudo("chmod +x {home_dir}/{script}".format(home_dir=home_dir,
                                                   script=script))
# git_clone(domain, directory, env): clone the site repository into /srv/www
# and wire up the environment config symlink and www-data permissions.
# NOTE(review): flattened onto one line; it is unclear whether the symlink /
# chown / chmod statements are nested inside the `if not exists(...)` block or
# always executed — code left byte-identical.
def git_clone(domain,directory='',env='dev'): if directory=='': directory = domain if not exists('/srv/www/{0}'.format(domain)): with cd("/srv/www/"): run("git clone [email protected]:<github>/{0}.git {1}".format(domain, directory),warn_only=True) if is_link('/srv/www/{0}/app/etc/local.xml'.format(domain)): run('rm /srv/www/{0}/app/etc/local.xml'.format(domain)) run('ln -s /srv/www/{1}/app/etc/local.xml.{0} /srv/www/{1}/app/etc/local.xml'.format(env,directory)) sudo('chown -R www-data:www-data /srv/www/{0}'.format(directory)) sudo('chmod -R g+rw /srv/www/{0}'.format(directory))
def configure_website(website_local_directory=WEBSITE_LOCAL_DIRECTORY):
    """
    Task for *colour-science.local* website configuration.

    Parameters
    ----------
    website_local_directory : unicode
        Website local directory.
    """
    provider_directory = '/var/www'
    # Guard clause: already symlinked means the site was configured before.
    if is_link(provider_directory):
        return
    sudo('sed -i "s/AllowOverride None/AllowOverride All/g" '
         '/etc/apache2/apache2.conf')
    sudo('rm -rf {0}'.format(provider_directory))
    sudo('ln -fs {0} {1}'.format(website_local_directory, provider_directory))
    sudo('a2enmod rewrite')
    sudo('service apache2 restart')
# NOTE(review): flattened onto one line; exact nesting of the trailing
# statements cannot be recovered with certainty, so the code is left
# byte-identical. Same behaviour as the files.symlink variant above:
# creates base/log dirs, updates the instance conf, symlinks the kraken
# binary, registers SysV startup when not on systemd, starts the service
# and pgreps by instance name to report it.
def create_eng_instance(instance): """Create a new kraken instance * Install requirements (idem potem) * Deploy the binary, the templatized ini configuration in a dedicated directory with rights to www-data and the logdir * Deploy initscript and add it to startup * Start the service """ instance = get_real_instance(instance) for host in instance.kraken_engines: with settings(host_string=host): # base_conf require.files.directory(instance.kraken_basedir, owner=env.KRAKEN_USER, group=env.KRAKEN_USER, use_sudo=True) # logs require.files.directory(env.kraken_log_basedir, owner=env.KRAKEN_USER, group=env.KRAKEN_USER, use_sudo=True) update_eng_instance_conf(instance) # kraken.ini, pid and binary symlink kraken_bin = "{}/{}/kraken".format(env.kraken_basedir, instance.name) if not is_link(kraken_bin): files.symlink("/usr/bin/kraken", kraken_bin, use_sudo=True) sudo('chown -h {user} {bin}'.format(user=env.KRAKEN_USER, bin=kraken_bin)) #run("chmod 755 /etc/init.d/kraken_{}".format(instance)) # TODO refactor this and test it on systemd and non-systemd machines if not env.use_systemd: sudo("update-rc.d kraken_{} defaults".format(instance.name)) print(blue("INFO: Kraken {instance} instance is starting on {server}, " "waiting 5 seconds, we will check if processus is running".format( instance=instance.name, server=get_host_addr(env.host_string)))) service.start("kraken_{}".format(instance.name)) run("sleep 5") # we wait a bit for the kraken to pop # test it ! # execute(test_kraken, get_host_addr(env.host_string), instance, fail_if_error=False) with settings(warn_only=True): run("pgrep --list-name --full {}".format(instance.name)) print(blue("INFO: kraken {instance} instance is running on {server}". format(instance=instance.name, server=get_host_addr(env.host_string))))
# NOTE(review): flattened onto one line; exact nesting of the trailing
# service-start/print statements cannot be recovered with certainty, so the
# code is left byte-identical. Idempotent variant: uses idempotent_symlink,
# passes the host to update_eng_instance_conf, enables via systemd or SysV,
# and only starts the service when service.is_running reports it stopped.
def create_eng_instance(instance): """ Create a new kraken instance (idempotent) * Install requirements * Deploy the binary, the templatized ini configuration in a dedicated directory with rights to www-data and the logdir * Deploy initscript and add it to startup * Start the service """ instance = get_real_instance(instance) for host in instance.kraken_engines: with settings(host_string=host): # base_conf require.files.directory(instance.kraken_basedir, owner=env.KRAKEN_USER, group=env.KRAKEN_USER, use_sudo=True) # logs require.files.directory(env.kraken_log_basedir, owner=env.KRAKEN_USER, group=env.KRAKEN_USER, use_sudo=True) update_eng_instance_conf(instance, host) # kraken.ini, pid and binary symlink kraken_bin = "{}/{}/kraken".format(env.kraken_basedir, instance.name) if not is_link(kraken_bin): idempotent_symlink("/usr/bin/kraken", kraken_bin, use_sudo=True) sudo('chown -h {user} {bin}'.format(user=env.KRAKEN_USER, bin=kraken_bin)) kraken = "kraken_{}".format(instance.name) if not service.is_running(kraken): # TODO test this on systemd machines if env.use_systemd: sudo("systemctl enable kraken_{}.service".format(instance.name)) else: sudo("update-rc.d kraken_{} defaults".format(instance.name)) print(blue("INFO: kraken {instance} instance is starting on {server}, " "waiting 5 seconds, we will check if processus is running" .format(instance=instance.name, server=get_host_addr(env.host_string)))) service.start(kraken) run("sleep 5") # we wait a bit for the kraken to pop with settings(warn_only=True): run("pgrep --list-name --full /srv/kraken/{}/kraken".format(instance.name)) print(blue("INFO: kraken {instance} instance is running on {server}". format(instance=instance.name, server=get_host_addr(host))))
def configure_nginx():
    """
    Configures nginx to serve the exercise webpage on port 8000.

    Raises
    ------
    PackageNotInstalledException
        If the nginx package is not installed on the remote host.
    """
    available_path = '/etc/nginx/sites-available/' + SITENAME
    enabled_path = '/etc/nginx/sites-enabled/' + SITENAME
    # Idiom fix: test truthiness directly rather than comparing '== False'.
    if not package_installed('nginx'):
        raise PackageNotInstalledException("nginx is not installed.")
    print("Configuring nginx")
    if not exists(available_path):
        # Upload the template and substitute the site name in place.
        put('nginx.template.conf', '~/nginx.template.conf')
        sudo('mv ~/nginx.template.conf ' + available_path)
        sed(available_path, 'SITENAME', SITENAME, use_sudo=True)
    # Replace a stale enabled-site symlink so the ln -s below cannot fail.
    if is_link(enabled_path, use_sudo=True, verbose=True):
        sudo('rm ' + enabled_path)
    sudo('ln -s ' + available_path + ' ' + enabled_path)
def configure_nginx():
    """
    Configures nginx to serve the exercise webpage on port 8000.

    Raises
    ------
    PackageNotInstalledException
        If the nginx package is not installed on the remote host.
    """
    available_path = '/etc/nginx/sites-available/' + SITENAME
    enabled_path = '/etc/nginx/sites-enabled/' + SITENAME
    # Idiom fix: test truthiness directly rather than comparing '== False'.
    if not package_installed('nginx'):
        raise PackageNotInstalledException("nginx is not installed.")
    print("Configuring nginx")
    if not exists(available_path):
        # Upload the template and substitute the site name in place.
        put('nginx.template.conf', '~/nginx.template.conf')
        sudo('mv ~/nginx.template.conf ' + available_path)
        sed(available_path, 'SITENAME', SITENAME, use_sudo=True)
    # Replace a stale enabled-site symlink so the ln -s below cannot fail.
    if is_link(enabled_path, use_sudo=True, verbose=True):
        sudo('rm ' + enabled_path)
    sudo('ln -s ' + available_path + ' ' + enabled_path)
def symlink(params):
    """
    Create a symlink command.

    ``params`` must contain 'target' (what the link points to) and 'source'
    (the link path to create). An existing symlink at 'source' is removed
    first; missing keys abort the task.
    """
    params = utils.format_params(params)
    # Idiom fix: "key not in dict" reads better than "not key in dict".
    if 'source' not in params:
        abort('No source set')
    if 'target' not in params:
        abort('No target set')
    if is_link(params['source']):
        print(yellow("Symlink `%s` exists and will be removed" % params['source']))
        run('rm %s' % params['source'])
    command = "ln -s %s %s" % (params['target'], params['source'])
    run(command)
    print(green("Symlink from `%s` to `%s`." % (params['source'], params['target'])))
def configure_website(website_local_directory=WEBSITE_LOCAL_DIRECTORY):
    """
    Task for *colour-science.local* website configuration.

    Parameters
    ----------
    website_local_directory : unicode
        Website local directory.
    """
    provider_directory = '/var/www/html'
    # Guard clause: already symlinked means the site was configured before.
    if is_link(provider_directory):
        return
    sudo('sed -i "s/AllowOverride None/AllowOverride All/g" '
         '/etc/apache2/apache2.conf')
    sudo('sed -i "s|/usr/lib/cgi-bin|/var/www/html/cgi-bin|g" '
         '/etc/apache2/sites-enabled/000-default.conf')
    sudo('rm -rf {0}'.format(provider_directory))
    sudo('ln -fs {0} {1}'.format(website_local_directory, provider_directory))
    sudo('a2enmod rewrite')
    sudo('service apache2 restart')
# NOTE(review): Python-2 method flattened onto one line; exact nesting of the
# upload branch cannot be recovered with certainty, so the code is left
# byte-identical. Links each entry of fileStructure['shared'] from the release
# dir into the shared dir (unlinking or rm -rf'ing whatever is in the way),
# then — unless deploying a bundle — uploads every fileStructure['toUpload']
# entry into the release dir and fixes permissions.
def createSymbolicLinks(self, release_name, deployMode): curReleaseDir = join(arke.Core.paths['releases'], release_name) print yellow('\n>> Creating links between shared files') for arr in arke.Core.options['project']['fileStructure']['shared']: if len(arr) == 1: arr = [arr[0], arr[0]] nodeOriginFullPath = join(curReleaseDir, arr[0]) nodeTargetFullPath = join(arke.Core.paths['shared'], arr[1]) print cyan('>>> Linking: current/%s -> shared/%s' % tuple(arr)) with hideOutput(): if is_link(nodeOriginFullPath): sudo('unlink %s' % (nodeOriginFullPath)) elif exists(nodeOriginFullPath): sudo('rm -rf %s' % (nodeOriginFullPath)) sudo('ln -sfv %s %s' % (nodeTargetFullPath, nodeOriginFullPath)) print green('>> Done linking shared files and folders') if deployMode != 'bundle' and 'toUpload' in arke.Core.options[ 'project']['fileStructure']: print yellow('\n>> Sending all files/folders listed on "toUpload"') for arr in arke.Core.options['project']['fileStructure'][ 'toUpload']: if len(arr) == 1: arr = [arr[0], arr[0]] nodeOriginFullPath = join(arke.Core.paths['base'], arr[0]) nodeTargetFullPath = join(curReleaseDir, arr[1]) print cyan('>>> Uploading: %s -> %s' % tuple(arr)) with hideOutput(): upload_project(local_dir=nodeOriginFullPath, remote_dir=nodeTargetFullPath, use_sudo=True) print green('>> Done uploading files and folders') self.fixPermissions()
def upload(self, release_id):
    """Upload all configured paths for *release_id*.

    New remote destinations are first copied from the currently-active
    release (via the 'current' link) before being rsynced, so unchanged
    files do not have to be transferred again.
    """
    from fabric.contrib import files
    import posixpath

    root = self._abs_path(self.upload_storage_path)
    self._ensure_path_exists(root)
    self._ensure_path_exists(self._get_upload_release_path(release_id))
    current = self._path_join(root, self.upload_storage_current_link)

    for local_path, remote_path in self.upload_paths:
        managed = not files.exists(remote_path) or files.is_link(remote_path)
        if not managed:
            # A real file/dir is in the way — refuse to overwrite it.
            raise RuntimeError(
                'Remote path already exists, but is no symlink (%s)'
                % remote_path)
        target = self._get_remote_upload_path(release_id, remote_path)
        if self._exists(current) and not self._exists(target):
            name = self._get_remote_upload_pathname(release_id, remote_path)
            seed = self._path_join(current, name)
            # Exactly one trailing separator makes cp copy directory contents.
            seed_arg = seed.rstrip(posixpath.sep) + posixpath.sep
            target_arg = target.rstrip(posixpath.sep) + posixpath.sep
            self._run('{cp_bin} {cp_args} {from_path} {to_path}'.format(
                cp_bin=self.cp_bin(),
                cp_args=self.cp_args,
                from_path=shell_quote(seed_arg),
                to_path=shell_quote(target_arg),
            ))
        self._rsync_upload(local_path, target)
def disable(conf, do_restart=True):
    """
    Disable logstash input/output provider

    :param conf: Input or output provider config file
    :param do_restart: Restart service
    :return: Got disabled?
    """
    disabled = False
    # Normalise to a .conf filename.
    if not conf.endswith('.conf'):
        conf = '{}.conf'.format(conf)
    with sudo(), cd(conf_enabled_path):
        if not files.is_link(conf):
            warn('Invalid conf: {}'.format(conf))
        else:
            info('Disabling conf: {}', conf)
            with silent():
                debian.rm(conf)
            disabled = True
            if do_restart:
                restart('server')
    return disabled
def disable(site, do_reload=True):
    """
    Disable site

    :param site: Site to disable
    :param do_reload: Reload nginx service
    :return: Got disabled?
    """
    disabled = False
    # Normalise to a .conf filename; 'default' is used verbatim.
    if not (site.endswith('.conf') or site == 'default'):
        site = '{}.conf'.format(site)
    with sudo(), cd(sites_enabled_path):
        if not files.is_link(site):
            warn('Invalid site: {}'.format(site))
        else:
            info('Disabling site: {}', site)
            with silent():
                debian.rm(site)
            disabled = True
            if do_reload:
                reload()
    return disabled
def disable(conf, do_restart=True):
    """
    Disable logstash input/output provider

    :param conf: Input or output provider config file
    :param do_restart: Restart service
    :return: Got disabled?
    """
    disabled = False
    # Normalise to a .conf filename.
    if not conf.endswith(".conf"):
        conf = "{}.conf".format(conf)
    with sudo(), cd(conf_enabled_path):
        if not files.is_link(conf):
            warn("Invalid conf: {}".format(conf))
        else:
            info("Disabling conf: {}", conf)
            with silent():
                debian.rm(conf)
            disabled = True
            if do_restart:
                restart("server")
    return disabled
def disable(program, do_reload=True):
    """
    Disable program.

    :param program: Program to disable
    :param do_reload: Reload supervisor
    :return: Got disabled?
    """
    disabled = False
    # Normalise to a .conf filename; 'default' is used verbatim.
    if not (program.endswith(".conf") or program == "default"):
        program = "{}.conf".format(program)
    with sudo(), cd(programs_enabled_path):
        if not files.is_link(program):
            warn("Invalid program: {}".format(program))
        else:
            info("Disabling program: {}", program)
            with silent():
                debian.rm(program)
            disabled = True
            if do_reload:
                reload()
    return disabled
def disable(site, do_reload=True):
    """
    Disable site

    :param site: Site to disable
    :param do_reload: Reload nginx service
    :return: Got disabled?
    """
    disabled = False
    # Normalise to a .conf filename; 'default' is used verbatim.
    if not (site.endswith('.conf') or site == 'default'):
        site = '{}.conf'.format(site)
    with sudo(), cd(sites_enabled_path):
        if not files.is_link(site):
            warn('Invalid site: {}'.format(site))
        else:
            info('Disabling site: {}', site)
            with silent():
                debian.rm(site)
            disabled = True
            if do_reload:
                reload()
    return disabled
def disable(program, do_reload=True):
    """
    Disable program.

    :param program: Program to disable
    :param do_reload: Reload supervisor
    :return: Got disabled?
    """
    disabled = False
    # Normalise to a .conf filename; 'default' is used verbatim.
    if not (program.endswith('.conf') or program == 'default'):
        program = '{}.conf'.format(program)
    with sudo(), cd(programs_enabled_path):
        if not files.is_link(program):
            warn('Invalid program: {}'.format(program))
        else:
            info('Disabling program: {}', program)
            with silent():
                debian.rm(program)
            disabled = True
            if do_reload:
                reload()
    return disabled
# NOTE(review): Python-2 method flattened onto one line; exact nesting cannot
# be recovered with certainty, so the code is left byte-identical. Restarts
# and reloads the services configured for the environment, runs the project's
# optional afterDeploy commands in the release dir, atomically repoints the
# 'current' symlink at the new release (removing whatever was there before),
# fixes permissions, and prunes old releases down to maxReleases.
def afterDeploy(self, release_name): curReleaseDir = join(arke.Core.paths['releases'], release_name) with hideOutput(), settings(warn_only=True): print yellow('\n>> Restarting services') for service in arke.Core.getEnvOption('services')['toRestart']: self.service_restart(service) print green('>> Done restarting services') print yellow('\n>> Reloading services') for service in arke.Core.getEnvOption('services')['toReload']: self.service_reload(service) print green('>> Done reloading services') if 'afterDeploy' in arke.Core.options['project']['cmds']: print yellow('\n>> Running after-deploy commands') with hide('running'): runCommandList( arke.Core.options['project']['cmds']['afterDeploy'], curReleaseDir, False) print green('>> Done running after-deploy commands') # Links latest release to the current directory print yellow('\n>> Linking "current" directory to newest release') with hideOutput(): if is_link(arke.Core.paths['current']): sudo('unlink %s' % (arke.Core.paths['current'])) elif exists(arke.Core.paths['current']): sudo('rm -rf %s' % (arke.Core.paths['current'])) sudo('ln -sfv %s %s' % (curReleaseDir, arke.Core.paths['current'])) print green('>> Done linking "current" directory') self.fixPermissions() self.cleanup_releases(arke.Core.options['project']['maxReleases'])
def test_is_link_is_true_on_symlink(self):
    """A path created with `ln -s` must be reported as a link."""
    # Register both paths for cleanup before creating them remotely.
    self.remote.extend(['/tmp/foo', '/tmp/bar'])
    run("touch /tmp/foo")
    run("ln -s /tmp/foo /tmp/bar")
    assert files.is_link('/tmp/bar')
# NOTE(review): Python-2 function flattened onto one line; exact nesting of the
# try block cannot be recovered with certainty, so the code is left
# byte-identical. Uploads the archive as <archive>-<revision> (revision
# defaults to the archive's sha256), unpacks it into app-<revision> under the
# web root, archives the previously linked app-* dir into old/, repoints the
# 'app' symlink, prints the old->new revision transition and opens a shell in
# the new app dir. Raises CommandFailed when 'app' exists but is not a symlink.
def _release(archive, revision=None, web_root=None, **kwargs): ''' Main task its role is to decompress an archive to the web root into a directory named 'app-X' where X identifies the revision; by default the revision is calculated from the sha256 of the archive when not indicated. :param version: :param archive: :param web_root: :param kwargs: :return: ''' previous_revision = None cwd = erun('pwd').stdout if not web_root else web_root if not os.path.exists(archive): raise CommandFailed('Archive \'%s\' doesn\'t exist' % archive) revision = revision or hashfile(archive, hashlib.sha256()) remote_filepath = '%s-%s' % (archive, revision) app_dir = os.path.join(cwd, 'app-%s' % revision) app_symlink = os.path.join(cwd, 'app') put(local_path=archive, remote_path=remote_filepath) try: # if exists remove dir if files.exists(app_dir): erun('rm -vfr %s' % ( app_dir, )) # create the remote dir erun('mkdir -p %s' % app_dir) erun('tar xf %s -C %s' % ( remote_filepath, app_dir, )) # find the previous release and move/unlink it if files.exists(app_symlink) and is_link(app_symlink): # TODO: move old deploy in an 'archive' directory previous_deploy_path = erun('basename $(readlink -f %s)' % app_symlink).stdout idx = previous_deploy_path.index('-') previous_revision = previous_deploy_path[idx + 1:] if previous_revision != revision: erun('unlink %s' % app_symlink) erun('mkdir -p old && mv -f %s old/' % previous_deploy_path) elif files.exists(app_symlink): raise CommandFailed('app directory already exists and is not a symlink') erun('ln -s %s %s' % (app_dir, app_symlink)) except CommandFailed as e: print 'An error occoured: %s' % e print ''' %s --> %s ''' % (previous_revision or '?', revision) open_shell('cd %s' % ( app_dir, ))
# NOTE(review): Python-2 function flattened across two lines; exact nesting of
# the try block cannot be recovered with certainty, so the code is left
# byte-identical. Builds a release archive from *head*, verifies it contains
# the requirements file, uploads and unpacks it into app-<revision>, syncs the
# virtualenv, runs the optional *steps* hooks, archives the previously linked
# app-* dir into old/, relinks 'app', and on failure prints the transition and
# opens a shell in the new app dir with the virtualenv activated.
def release(head='HEAD', web_root=None, requirements=u'requirements.txt', envpath='.env', steps=None): '''Main task for releasing. Unarchive the release in the webroot, sync_virtualenv and update the app/ directory to point to the new release and archive in old/. ''' steps = validate_steps(steps) if steps else [] cwd = erun('pwd').stdout if not web_root else web_root abs_envpath = os.path.abspath(os.path.join(cwd, envpath)) if not files.exists(abs_envpath): raise abort('%s doesn\'t exist, create it before release using configure_env task!!!' % abs_envpath) # locally we create the archive with the app code create_release_archive(head) release_filename = get_release_filename() local_release_filepath = get_release_filepath() actual_version = describe_revision(head) previous_version = None # check that the archive contains the requirements file tf = tarfile.open(local_release_filepath) try: tf.getmember(requirements) except KeyError as e: abort('file \'%s\' doesn\'t exist, indicate a requirements file contained into the release archive' % requirements) finally: tf.close() # and upload it to the server if not files.exists(release_filename): put(local_path=local_release_filepath) app_dir = os.path.abspath(os.path.join(cwd, 'app-%s' % describe_revision(head))) virtualenv_path = os.path.abspath(os.path.join(cwd, '.virtualenv')) try: # if exists remove dir if files.exists(app_dir): erun('rm -vfr %s' % ( app_dir, )) # create the remote dir erun('mkdir -p %s' % app_dir) erun('tar xf %s -C %s' % ( release_filename, app_dir, )) sync_virtualenv(virtualenv_path, '%s/%s' % (app_dir, requirements,))# parametrize with cd(app_dir): for step in steps: step(virtualenv_path) # find the previous release and move/unlink it if is_link('app'): # TODO: move old deploy in an 'archive' directory previous_deploy_path = erun('basename $(readlink -f app)').stdout idx = previous_deploy_path.index('-') previous_version = previous_deploy_path[idx + 1:] if previous_version != actual_version: 
erun('unlink app') erun('mkdir -p old && mv -f %s old/' % previous_deploy_path) erun('ln -s %s app' % app_dir) except CommandFailed as e: print 'An error occoured: %s' % e print ''' %s --> %s Use 'honcho --env ../.env start' inside a screen session ''' % (previous_version, actual_version) open_shell('cd %s && source %s/bin/activate' % ( app_dir, virtualenv_path, ))
# NOTE(review): Python-2 method flattened across three lines; exact nesting
# cannot be recovered with certainty, so the code is left byte-identical.
# Interactive server bootstrap: creates the site either via EasyEngine
# (choosing a site type and repointing the server-block root at webRoot) or
# from arke's nginx template (uploading nginx.conf and the server block, then
# linking sites-available -> sites-enabled and reloading nginx); creates the
# shared/releases directory structure; optionally moves EasyEngine's default
# wp-config.php into shared/, links .well-known, and uploads either a .env
# (regenerating WP salts) or a wp-config.php template; finally fixes
# permissions.
def setup(self): isInstalled = self.checkRequisites() sitesAvailable = '' sitesEnabled = '' installationMode = 'manual' if (isInstalled.has_key('nginx') and isInstalled['nginx']): sitesAvailable = join(arke.Core.paths['nginx'], 'sites-available', arke.Core.getEnvOption('name')) sitesEnabled = join(arke.Core.paths['nginx'], 'sites-enabled', arke.Core.getEnvOption('name')) # nginx setup if (isInstalled.has_key('ee') and isInstalled['ee'] and ask('Create website with EasyEngine?')): installationMode = 'ee' eeFlags = [ '--html', '--php7', '--mysql --php7', '--wp --php7', '--wpfc --php7' ] print yellow('\n>> Creating site with EasyEngine') siteFlags = eeFlags[whichOption([ 'HTML', 'PHP', 'PHP \ MySQL', 'Wordpress', 'Wordpress + FastCGI Cache' ], 'Choose a website type', 'Type: ')] with hide('warnings'), settings(warn_only=True): sudo('ee site create %s %s' % (siteFlags, arke.Core.getEnvOption('name'))) with hideOutput(): # Appends /current to the server block root path sed(sitesAvailable, 'root .*;', 'root %s;' % arke.Core.paths['webRoot'], limit='', use_sudo=True, backup='', flags='i', shell='/bin/bash') # Deletes default files if (len(arke.Core.paths['publicHTML']) > 0 and arke.Core.paths['publicHTML'] != '/'): sudo('rm -rf %s/*' % arke.Core.paths['publicHTML']) print green('>> Done creating site with EasyEngine') elif ask('Run nginx configuration setup instead?'): print yellow('\n>> Creating site with arke\'s nginx template') with hideOutput(): print cyan('>>> Uploading nginx.conf -> shared/nginx.conf') put('%s/templates/nginx/nginx.conf' % arke.Core.paths['auxFiles'], join(arke.Core.paths['shared'], 'nginx.conf'), use_sudo=True) print cyan( '>>> Uploading nginx server block -> etc/sites-available/%s' % arke.Core.getEnvOption('name')) self.upload_template( 'site', sitesAvailable, template_dir='%s/templates/nginx' % arke.Core.paths['auxFiles'], use_sudo=True, use_jinja=True, context={ 'HOSTNAME': arke.Core.getEnvOption('hostnames'), 'ROOT': arke.Core.paths['webRoot'], 
}) print cyan('>>> Linking sites-available -> sites-enabled') if is_link(sitesEnabled): sudo('unlink %s' % sitesEnabled) elif exists(sitesEnabled): sudo('rm -rf %s' % sitesEnabled) sudo('ln -sfv %s %s' % (sitesAvailable, sitesEnabled)) self.service_reload('nginx') print green( '>> Done creating site with arke\'s nginx template') # Creates shared/release directory structure print yellow('\n>> Creating shared and releases directories') with hideOutput(): sudo('mkdir -p %s %s' % (arke.Core.paths['releases'], join(arke.Core.paths['shared'], 'uploads'))) sudo('touch %s/robots.txt' % arke.Core.paths['shared']) print green('>> Done creating shared and releases directories') if (installationMode == 'ee'): defaultWPConfig = '%s/wp-config.php' % arke.Core.paths['project'] if (exists(defaultWPConfig)): print yellow( '\n>> Moving EasyEngine\'s default wp-config.php to shared folder' ) with hideOutput(): sudo('mv %s %s/' % (defaultWPConfig, arke.Core.paths['shared'])) print green('>> Done moving default wp-config.php') print '' if (ask('Link htdocs/.well-known to shared/.well-known?')): print yellow('\n>> Linking .well-known directory') with hideOutput(): sudo('ln -sfv %s/.well-known %s/.well-known' % (arke.Core.paths['publicHTML'], arke.Core.paths['shared'])) print green('>> Done linking .well-known') # .env print '' if ask('Upload .env?'): print yellow('\n>> Uploading .env') with cd(arke.Core.paths['shared']), hideOutput(): self.upload_template('dotenv', '.env', template_dir='%s/templates/wp/' % arke.Core.paths['auxFiles'], use_sudo=True, use_jinja=True, context={ 'ENVIRONMENT': env.name, 'MAIN_URL': arke.Core.getEnvOption('name') }) print cyan('>>> Generating salts on the .env file') with hideOutput(), settings(warn_only=True): run('wp dotenv salts regenerate') print green('>> Done uploading .env') elif ask('Upload wp-config.php?'): print yellow('\n>> Uploading wp-config.php') with cd(arke.Core.paths['shared']), hideOutput(): self.upload_template('wp-config.php', 
'wp-config.php', template_dir='%s/templates/wp/' % arke.Core.paths['auxFiles'], use_sudo=True, use_jinja=True, context={'ENVIRONMENT': env.name}) print green('>> Done uploading wp-config.php') self.fixPermissions()
def test_is_link_is_true_on_symlink(self):
    """A symlink created with `ln -s` is reported as a link."""
    # Register the link for cleanup, then create it remotely.
    self.created.append("/tmp/bar")
    run("ln -s /tmp/foo /tmp/bar")
    assert files.is_link("/tmp/bar")
def test_is_link_is_false_on_non_link(self):
    """A regular file is not reported as a link."""
    # Register the file for cleanup, then create it remotely.
    self.created.append("/tmp/biz")
    run("touch /tmp/biz")
    assert not files.is_link("/tmp/biz")
def is_valid_path(path, is_link=False):
    """Return True when *path* exists on the remote host.

    When *is_link* is True, the path must additionally be a symlink.
    Failures of the remote checks are tolerated (warn_only).
    """
    with settings(warn_only=True):
        if not files.exists(path):
            return False
        if is_link and not files.is_link(path):
            return False
        return True
def test_is_link_is_false_on_non_link(self):
    """A regular file is not reported as a link."""
    # Register the file for cleanup, then create it remotely.
    self.remote.append('/tmp/biz')
    run("touch /tmp/biz")
    assert not files.is_link('/tmp/biz')
def test_is_link_is_false_on_non_link(self):
    """A plain file created with `touch` is not a symlink."""
    run("touch /tmp/biz")
    assert files.is_link('/tmp/biz') == False  # noqa: E712 - mirrors suite style
def test_is_link_is_true_on_symlink(self):
    """A path created with `ln -s` is reported as a link."""
    run("ln -s /tmp/foo /tmp/bar")
    assert files.is_link("/tmp/bar")
def create_link():
    """Repoint the 'current' symlink inside app_dir at the release named *now*."""
    current_path = os.path.join(app_dir, "current")
    with cd(app_dir):
        # Remove any existing 'current' link before re-creating it.
        if is_link(current_path):
            sudo("rm -f {0}".format(current_path))
        symlink(os.path.join(app_dir, now), "current", use_sudo=True)
# NOTE(review): Python-2 function flattened across two lines; exact nesting of
# the try/except cannot be recovered with certainty, so the code is left
# byte-identical. Builds and uploads a release archive, unpacks it into
# releases/app-<revision>, copies .env in, syncs the virtualenv, runs
# collectstatic and migrate, rotates current -> previous and relinks current,
# restarts services, then opens a remote shell; on CommandFailed it falls back
# to a shell in the new release dir instead.
def deploy(head="HEAD", requirements="requirements/production.txt"): """Deploy the latest version of the site to the server and restart services""" create_release_archive() release_filename = get_release_filename() actual_version = describe_revision(head) previous_version = None remote_path = "%(path)s/releases" % env release_dir = os.path.join(remote_path, "app-%s" % describe_revision(head)) current_release_dir = os.path.join(remote_path, "current") previous_release_dir = os.path.join(remote_path, "previous") virtualenv_path = os.path.abspath(os.path.join(env.path, ".virtualenv")) # and upload it to the server if not files.exists(release_dir): put(local_path=get_release_filepath(), remote_path=remote_path) try: # if exists remove dir if files.exists(release_dir): erun("rm -vfr %s" % (release_dir,)) # create the remote dir erun("mkdir -p %s" % release_dir) erun("tar xf %s -C %s" % (os.path.join(remote_path, release_filename), release_dir)) # remove tar erun("rm %s" % os.path.join(remote_path, release_filename)) # copy .env erun("cp %s %s" % (os.path.join(env.path, ".env"), release_dir)) sync_virtualenv(virtualenv_path, "%s/%s" % (release_dir, requirements)) # parametrize with remote_virtualenv(release_dir): erun("python manage.py collectstatic --noinput") erun("python manage.py migrate") # find the previous release and move/unlink it if is_link(current_release_dir): # TODO: move old deploy in the 'previous' directory previous_deploy_path = erun("basename $(readlink -f %s)" % current_release_dir).stdout idx = previous_deploy_path.index("-") previous_version = previous_deploy_path[idx + 1 :] if previous_version != actual_version: if files.exists(previous_release_dir): erun("rm -R %s" % previous_release_dir) erun("mv %s %s" % (current_release_dir, previous_release_dir)) erun("ln -s %s %s" % (release_dir, current_release_dir)) restart() print """ #################################### # shell # #################################### """ print """ %s --> %s Use 'ps ax | 
grep uwsgi' to check uwsgi processes If first deploy run 'python manage.py createsuperuser' to create new superuser """ % ( previous_version, actual_version, ) open_shell("cd %s && source %s/bin/activate" % (current_release_dir, virtualenv_path)) except CommandFailed as e: print "An error occoured: %s" % e print """ #################################### # fallback to shell # #################################### """ print """ %s --> %s Use '../../.virtualenv/bin/uwsgi --ini uwsgi.ini' to start uwsgi """ % ( previous_version, actual_version, ) open_shell("cd %s && source %s/bin/activate" % (release_dir, virtualenv_path))