def deploy():
    """Force an immediate chef-client run on the target environment.

    Chef polls the repo roughly every 30 minutes and redeploys on
    change; running this task triggers that cycle right away.
    """
    require('environment')
    sudo('chef-client')
def start_nagios():
    """Upload the Nagios command config, register it to start at boot,
    and start the service now."""
    nagios_commands = './commands.cfg'
    remote_config = '/usr/local/nagios/etc/objects/commands.cfg'
    put(nagios_commands, remote_config, use_sudo=True)
    # Symlink into rcS.d so Nagios comes up on boot (upstart-style job).
    sudo('ln -sf /etc/init.d/nagios /etc/rcS.d/S99nagios')
    sudo('/etc/init.d/nagios start')
def setMaster():
    # Reset /etc/hosts from a pristine backup, append the local `hosts`
    # file, then rsync the project tree and distribute the key file.
    if exists('/etc/hosts0'):
        print 'etc/hosts0 exists'
    else:
        # First run: preserve a pristine copy of /etc/hosts for later resets.
        sudo('cp /etc/hosts /etc/hosts0')
    # Restore /etc/hosts from the pristine copy before appending.
    sudo('rm /etc/hosts')
    sudo('cp /etc/hosts0 /etc/hosts')
    put('hosts')
    # NOTE(review): the outer sudo already runs as root, so the inner
    # `sudo tee` is redundant (but harmless).
    sudo('cat hosts|sudo tee -a /etc/hosts')
    run('rm hosts')
    run('cat /etc/hosts')
    path1 = '/home/{0}'.format(parm['USER'])
    rsync_project(path1, exclude=['result'])
    # path2: the synced project dir; path3: the program dir inside it.
    path2 = join(path1, basename(realpath('.')))
    path3 = join(path2, parm['programdir'])
    for dst in (path2, path3):
        fi = '{0}/{1}'.format(dst, parm['keyfile'])
        # Only upload and lock down the key when it is not already there.
        if not exists(fi, use_sudo=True):
            put(parm['keyfile'], dst)
            sudo('chmod 400 {0}'.format(fi))
    execute('genkey')
def install_qgis_master(gdal_from_source=False):
    """Install QGIS master under /usr/local/qgis-master.

    :param gdal_from_source: Whether gdal should be built from source.
        Default False.
    :type gdal_from_source: bool

    TODO: create one function from this and the 1.8 function above for DRY.
    """
    setup_env()
    setup_ccache()
    add_ubuntugis_ppa()
    sudo('apt-get build-dep -y qgis')
    #fabtools.require.deb.package('python-pyspatialite')
    for deb_package in ('python-psycopg2',
                        'python-qscintilla2',
                        'libqscintilla2-dev',
                        'libspatialindex-dev'):
        fabtools.require.deb.package(deb_package)
    clone_qgis(branch='master')
    workspace = '{0}/cpp'.format(env.fg.workspace)
    code_path = '{0}/QGIS'.format(workspace)
    build_path = '{0}/build-master-fabgis'.format(code_path)
    build_prefix = '/usr/local/qgis-master'
    compile_qgis(build_path, build_prefix, gdal_from_source)
def _install_packages(self):
    """Install Python 2.7 and its dependencies via pkg_add, bootstrap
    pip, then create the virtualenv."""
    for pkg in ('python27', 'py27-psycopg2', 'py27-setuptools',
                'py27-imaging'):
        sudo('pkg_add {0}'.format(pkg))
    sudo('easy_install-2.7 pip')
    self._install_venv()
def main():
    # Launch and provision a new EC2 instance end-to-end: prompt for
    # region/zone/security-group/name/type/key, boot the instance, point
    # fabric's env at it, install everything, and reboot.
    connection = common.connect()
    region = common.prompt_region(connection)
    # Reconnect scoped to the chosen region.
    connection = common.connect(region)
    zone = common.prompt_zone(connection)
    security_group = common.prompt_security_group(connection)
    # e.g. security group "web" + zone "us-east-1a" -> prefix "web-1a-".
    prefix = "{}-{}-".format(security_group, zone.split("-")[-1])
    name = _prompt_name(connection, prefix)
    instance_type = _prompt_instance_type()
    key_path = common.prompt_key_path()
    # Key pair name is the key file's basename without its extension.
    key_name = os.path.basename(key_path).split(".")[0]
    arguments = _LaunchArguments(instance_type=instance_type,
                                 key_name=key_name,
                                 name=name,
                                 security_group=security_group,
                                 zone=zone)
    # Point fabric at the freshly launched host.
    env.host_string = _launch(connection, arguments, region)
    env.key_filename = key_path
    env.user = _USERNAME
    common.wait_until_remote_reachable()
    sudo("hostname {}".format(name))
    _update_system_files(name)
    _install()
    _update_installed_files()
    reboot()
    # RAID the ephemeral disks when the type provides more than one.
    if instance_type.ephemeral_disk_count > 1:
        _create_ephemeral_raid(instance_type.ephemeral_disk_count)
    if _GIT_REPO:
        _clone()
def _add_user(username, uid=None):
    """Create system user *username* (no-op when it already exists).

    :param uid: optional numeric uid to assign to the new account.
    """
    # Guard: nothing to do when the account is already in /etc/passwd.
    if contains('/etc/passwd', "%s:" % username):
        return
    uid_str = "--uid %s" % uid if uid else ""
    command = ('useradd -d /home/%s --create-home --shell /bin/bash '
               '-c"Galaxy-required user" %s --user-group %s'
               % (username, uid_str, username))
    sudo(command)
def upgrade_pil():
    # Swap python-imaging for pillow inside the NewsBlur checkout,
    # pull the latest code, then kill gunicorn so it restarts on the
    # new library.
    with cd(env.NEWSBLUR_PATH):
        sudo('easy_install pillow')
        # celery_stop()
        pull()
        sudo('apt-get remove -y python-imaging')
        kill()
def copy_postgres_to_standby(): slave = 'db02' # Make sure you can ssh from master to slave and back. # Need to give postgres accounts keys in authroized_keys. # sudo('su postgres -c "psql -c \"SELECT pg_start_backup(\'label\', true)\""', pty=False) sudo('su postgres -c \"rsync -a --stats --progress /var/lib/postgresql/9.2/main postgres@%s:/var/lib/postgresql/9.2/ --exclude postmaster.pid\"' % slave, pty=False)
def setup_node_app():
    """Install Node.js from the chris-lea PPA, npm, the supervisor
    package, and open TCP port 8888."""
    for command in ('add-apt-repository -y ppa:chris-lea/node.js',
                    'apt-get update',
                    'apt-get install -y nodejs'):
        sudo(command)
    # npm installer script runs under the login user, elevating itself.
    run('curl -L https://npmjs.org/install.sh | sudo sh')
    sudo('npm install -g supervisor')
    sudo('ufw allow 8888')
def config_haproxy(debug=False):
    """Push the HAProxy config (debug or production flavor) and reload.

    :param debug: when True, use the local debug config instead of the
        secrets-managed production one.
    """
    if debug:
        source = 'config/debug_haproxy.conf'
    else:
        source = os.path.join(env.SECRETS_PATH, 'configs/haproxy.conf')
    put(source, '/etc/haproxy/haproxy.cfg', use_sudo=True)
    sudo('/etc/init.d/haproxy reload')
def kill_celery():
    """Run the kill_celery helper script inside the NewsBlur checkout.

    Uses sudo for the ubuntu user, plain run otherwise; failures are
    tolerated (warn_only) since the workers may already be dead.
    """
    with cd(env.NEWSBLUR_PATH):
        with settings(warn_only=True):
            runner = sudo if env.user == 'ubuntu' else run
            runner('./utils/kill_celery.sh')
def bounce_pgbouncer():
    # Stop pgbouncer as the postgres user, make sure no stragglers
    # survive, then start it again.
    sudo('su postgres -c "/etc/init.d/pgbouncer stop"', pty=False)
    run('sleep 2')
    with settings(warn_only=True):
        # warn_only: pkill exits non-zero when nothing matched.
        sudo('pkill -9 pgbouncer -e')
    run('sleep 2')
    # NOTE(review): deliberately run('sudo ...') rather than sudo() —
    # presumably to control tty/pty allocation for the daemon start;
    # confirm before changing.
    run('sudo /etc/init.d/pgbouncer start', pty=False)
def install_data_system():
    """Install and deploy cozy-data-system via the cozy monitor."""
    with cd('/home/cozy/cozy-setup'):
        sudo('coffee monitor install data-system', user='******')
    print(green("Data System successfully started"))
def kill():
    """Reload supervisor and kill any stray gunicorn workers.

    Failures killing workers are tolerated (warn_only) since they may
    already be gone after the supervisor reload.
    """
    sudo('supervisorctl reload')
    with settings(warn_only=True):
        runner = sudo if env.user == 'ubuntu' else run
        runner('./utils/kill_gunicorn.sh')
def install_requirements(filename, upgrade=False, use_mirrors=False,
                         use_sudo=False, user=None, download_cache=None,
                         quiet=False, pip_cmd='pip'):
    """
    Install Python packages from a pip `requirements file`_.

    ::

        import fabtools

        fabtools.python.install_requirements('project/requirements.txt')

    .. _requirements file: http://www.pip-installer.org/en/latest/requirements.html
    """
    # Assemble pip flags in a fixed order so the command is deterministic.
    options = []
    if use_mirrors:
        options.append('--use-mirrors')
    if upgrade:
        options.append('--upgrade')
    if download_cache:
        options.append('--download-cache="%s"' % download_cache)
    if quiet:
        options.append('--quiet')
    command = '%s install %s -r %s' % (pip_cmd, ' '.join(options), filename)
    # pty=False so pip output isn't mangled by a pseudo-terminal.
    if use_sudo:
        sudo(command, user=user, pty=False)
    else:
        run(command, pty=False)
def create_virtualenv(directory, system_site_packages=False, venv_python=None,
                      use_sudo=False, user=None, clear=False, prompt=None,
                      virtualenv_cmd='virtualenv'):
    """
    Create a Python `virtual environment`_.

    ::

        import fabtools

        fabtools.python.create_virtualenv('/path/to/venv')

    .. _virtual environment: http://www.virtualenv.org/
    """
    # Always quiet; append optional flags in a fixed order.
    options = ['--quiet']
    if system_site_packages:
        options.append('--system-site-packages')
    if venv_python:
        options.append('--python=%s' % quote(venv_python))
    if clear:
        options.append('--clear')
    if prompt:
        options.append('--prompt=%s' % quote(prompt))
    command = ' '.join([virtualenv_cmd, ' '.join(options), quote(directory)])
    if use_sudo:
        sudo(command, user=user)
    else:
        run(command)
def deploy():
    # Back up, hard-reset the checkout to origin/master, rebuild the
    # environment and static assets, then bounce supervisor and nginx.
    backup()
    with cd(env.directory):
        run('git checkout master')
        run('git fetch')
        run('git reset --hard origin/master')
        # Stale bytecode can mask deleted modules after a reset.
        run("find . -name '*.pyc' -delete")
        virtualenv('pip install -r requirements.pip')
        virtualenv('python manage.py collectstatic --noinput')
        virtualenv('python manage.py syncdb')
        virtualenv('python manage.py migrate')
        virtualenv('python manage.py rebuild_index --noinput')
        # Copy across our supervisor configuration, as it may have changed.
        run('cp configuration/picky.conf /etc/supervisor/conf.d/')
    restart()
    with cd(env.directory):
        run('cp configuration/wiki.wilfred.me.uk /etc/nginx/sites-enabled/')
        sudo("service nginx reload", shell=False)
def restart_treesheets():
    """Restarts the NodeJS process."""
    # The restart command in my init.d script fails for some reason,
    # but stop followed by start works.
    # TODO(eob): Fix the restart init.d script.
    for action in ('stop', 'start'):
        sudo('/etc/init.d/treesheets {0}'.format(action))
def enable():
    """Get uwsgi to reload python code.

    Creates the enabled-wsgi symlink when missing, then touches it so
    uwsgi picks up the change.
    """
    notify('Enabling app')
    link_cmd = ('if [ ! -f %(enabled_wsgi)s ]; '
                'then ln -s %(available_wsgi)s %(enabled_wsgi)s; fi' % env)
    sudo(link_cmd)
    sudo('touch %(enabled_wsgi)s' % env)
def provision():
    # Install system packages. In dev everything runs locally via
    # `local` (and the function returns early); in prod the commands run
    # remotely via `sudo`, plus deploy-dir preparation and nginx setup.
    require('deploy_type', provided_by=[dev, prod])
    commands = (
        'apt-get install --yes --no-upgrade %s' % ' '.join(PACKAGES),
    )
    if env.deploy_type == 'prod':
        # Prod additionally prepares the deploy directory.
        commands += (
            'mkdir -p %s' % env.deploy_dir,
            'chown -R %s:%s %s' % (env.deploy_user, env.deploy_user, env.deploy_dir),
        )
    if env.deploy_type == 'dev':
        local('; '.join('sudo %s' % cmd for cmd in commands))
        local('sudo apt-get build-dep python-numpy python-psycopg2 --no-upgrade')
        local('sudo npm config set registry http://registry.npmjs.org/')
        local('sudo npm -g install yuglify')
        return
    sudo('; '.join(commands))
    sudo('apt-get build-dep python-numpy python-psycopg2 --no-upgrade')
    sudo('npm config set registry http://registry.npmjs.org/')
    sudo('npm -g install yuglify')
    # Move aside the default nginx site before enabling ours.
    sudo('mv -f /etc/nginx/sites-enabled/default /etc/nginx/sites-available')
    provision_api()
    git_checkout()
def _clone_galaxy_repo(env): """ Clone Galaxy source code repository from ``env.galaxy_repository`` to ``env.galaxy_home``, setting the directory ownership to ``env.galaxy_user`` This method cannot be used to update an existing Galaxy installation. """ # Make sure ``env.galaxy_home`` dir exists but without Galaxy in it galaxy_exists = False if exists(env.galaxy_home): if exists(os.path.join(env.galaxy_home, '.hg')): env.logger.warning("Galaxy install dir {0} exists and seems to have " \ "a Mercurial repository already there. Galaxy already installed?"\ .format(env.galaxy_home)) galaxy_exists = True else: sudo("mkdir -p '%s'" % env.galaxy_home) if not galaxy_exists: with cd(env.galaxy_home): # Needs to be done as non galaxy user, otherwise we have a # permissions problem. galaxy_repository = env.get("galaxy_repository", 'https://bitbucket.org/galaxy/galaxy-central/') env.safe_sudo('hg clone %s .' % galaxy_repository) # Make sure ``env.galaxy_home`` is owned by ``env.galaxy_user`` _chown_galaxy(env, env.galaxy_home) # Make sure env.galaxy_home root dir is also owned by env.galaxy_user so Galaxy # process can create necessary dirs (e.g., shed_tools, tmp) sudo("chown {0}:{0} {1}".format(env.galaxy_user, os.path.split(env.galaxy_home)[0])) # If needed, custom-configure this freshly cloned Galaxy preconfigured = _read_boolean(env, "galaxy_preconfigured_repository", False) if not preconfigured: _configure_galaxy_repository(env)
def git_require():
    """
    Test high level git tools.

    These tests should also cover the low level tools as all of them
    are called indirectly.
    """
    from fabric.api import cd, sudo
    from fabtools import require
    from fabtools.system import distrib_family

    # Install git with the package manager matching the distro family.
    installers = {
        "debian": lambda: require.deb.package("git-core"),
        "redhat": lambda: require.rpm.package("git"),
    }
    install = installers.get(distrib_family())
    if install is not None:
        install()

    with cd("/tmp"):
        # Start from a clean slate.
        sudo("rm -rf *")

        git_require_remote_url()
        git_require_remote_url_and_path()
        git_require_no_update()
        git_require_branch()
        git_require_sudo()
        git_require_sudo_user()
def install_packages(self, packages=None):
    """Install *packages* (defaults to ``self.packages``) plus any
    per-server package lists, via aptitude.

    :param packages: iterable of package names; when falsy,
        ``self.packages`` is used.
    """
    if not packages:
        packages = self.packages
    # BUG FIX: previously this installed self.packages even when an
    # explicit `packages` argument was supplied, silently ignoring it.
    sudo('aptitude -y install %s' % " ".join(packages))
    for s in self.servers:
        # Servers may declare their own extra package lists.
        if hasattr(s, 'packages'):
            sudo('aptitude -y install %s' % " ".join(s.packages))
def sysctl(setting, value=None, do_reload=True):
    """Persist *setting*=*value* in /etc/sysctl.conf, optionally
    reloading kernel parameters immediately."""
    # Match "setting =" with any spacing, replace with "setting=value".
    file_update('/etc/sysctl.conf',
                r'%s\s*=' % setting,
                '%s=%s' % (setting, value),
                use_sudo=True)
    if do_reload:
        sudo('sysctl -p')
def install_packages(update=True):
    """
    Install necessary system packages.

    This is one of the few commands whose commands actually have to be in
    the fabfile, since the govtrack code is not known to be in the latest
    state (or on the machine at all) when the command gets run. Here we
    install things like git and python, without which we can't proceed.
    """
    if update:
        sudo('apt update')
    packages = (
        'git python-virtualenv python-lxml python-openid'
        ' python-oauth2client python-iso8601 python-numpy python-scipy'
        ' python-prctl python-pip libssl-dev'
        # For Solr
        ' openjdk-8-jre jetty8'
        # For PostgreSQL & MySQLclient support
        ' libpq-dev'
        ' libmysqlclient-dev'
        # For the web server
        ' nginx'
    )
    sudo('apt install -y ' + packages)
    install_ssl_packages(update=update)
def compress(use_current_release=False):
    """Run Django Compressor after a code update.

    :param use_current_release: run against the "current" release
        virtualenv/checkout instead of the root ones.
    """
    if use_current_release:
        venv, code = env.virtualenv_current, env.code_current
    else:
        venv, code = env.virtualenv_root, env.code_root
    with cd(code):
        sudo('{}/bin/python manage.py compress --force -v 0'.format(venv))
        sudo('{}/bin/python manage.py purge_compressed_files'.format(venv))
    update_manifest(save=True, use_current_release=use_current_release)
def git_require_sudo_user():
    """
    Test working_copy() with sudo as a user
    """
    from fabric.api import cd, sudo
    from fabtools.files import group, is_dir, owner
    from fabtools import require

    # Ensure the target account/group exist before cloning as them.
    require.user("gituser", group="gitgroup")
    require.git.working_copy(REMOTE_URL, path="wc_nobody", use_sudo=True,
                             user="******")
    assert is_dir("wc_nobody")
    assert is_dir("wc_nobody/.git")
    with cd("wc_nobody"):
        remotes = sudo("git remote -v", user="******")
        # Fabric returns CRLF-terminated command output, hence the \r\n.
        assert (
            remotes ==
            "origin\thttps://github.com/disko/fabtools.git (fetch)\r\n"
            "origin\thttps://github.com/disko/fabtools.git (push)"
        )
        branch = sudo("git branch", user="******")
        assert branch == "* master"
    # The working copy must be owned by the requested user/group.
    assert owner("wc_nobody") == "gituser"
    assert group("wc_nobody") == "gitgroup"
def _initialize_area_viral():
    # Populate env/dir/url/tarball metadata for the viral assembly area,
    # create any missing directories, and echo the configuration.
    _initialize_env("viral")
    env.VIRAL_SCRIPT = "%s/viral_assembly_pipeline.py" % dependency_URL
    viral_dirs["PROJECT_DIR"] = "%(VIRAL_ROOT_DIR)s/project" % env
    viral_dirs["REF_DIR"] = "%(VIRAL_ROOT_DIR)s/references" % env
    viral_dirs["TOOLS_DIR"] = "%(VIRAL_ROOT_DIR)s/tools" % env
    viral_dirs["TOOLS_BINARIES_DIR"] = "%s/BINARIES" % viral_dirs["TOOLS_DIR"]
    viral_dirs["TOOLS_PERL_DIR"] = "%s/PERL" % viral_dirs["TOOLS_DIR"]
    # Comma-separated list of reference sets the pipeline knows about.
    env.VIRAL_REF_FILES = "corona_virus,hadv,influenza_a_virus,jev,mpv,norv,rota_virus,rsv,veev,vzv,yfv"
    viral_urls["BIO_LINUX_URL"] = "http://nebc.nerc.ac.uk/bio-linux/"
    viral_tars["BINARIES_TARBALL"] = "BINARIES.tgz"
    viral_tars["PERL_TARBALL"] = "PERL.tgz"
    print("user: %(user)s" % env)
    print("host: %(host)s" % env)
    print("ROOT DIR: %(VIRAL_ROOT_DIR)s" % env)
    print("VIRAL ASSEMBLY SCRIPT: %(VIRAL_SCRIPT)s" % env)
    for name in sorted(viral_dirs.keys()):
        # Create any directory that does not exist yet.
        if not _path_is_dir(viral_dirs[name]):
            sudo("mkdir -p %s" % viral_dirs[name])
        print("%s: %s" % (name,viral_dirs[name]))
    print("VIRAL ASSEMBLY REFS FILES: %(VIRAL_REF_FILES)s" % env)
    for name in sorted(viral_urls.keys()):
        print("%s: %s" % (name,viral_urls[name]))
    for name in sorted(viral_tars.keys()):
        print("%s: %s" % (name,viral_tars[name]))
def deploy_web():
    """
    Installs the output of the build on the web instances.
    """
    require("configuration")
    # Replace any previous deployment with the freshly built archive.
    if exists(env.deploy_dir):
        run("rm -rf %s" % env.deploy_dir)
    run("tar -xvzf %s" % env.build_archive)
    run("mv %s deploy" % env.git_tag)
    run("source /usr/local/bin/virtualenvwrapper.sh && mkvirtualenv venv")
    # Environment exported into the remote shell for the manage.py steps.
    env.SHELL_ENV = dict(
        DJANGO_SETTINGS_MODULE=env.django_settings_module,
        DJANGO_CONFIGURATION=env.django_configuration,
        CONFIG_HTTP_PORT=env.config_http_port,
        CONFIG_SERVER_NAME=env.config_server_name,
    )
    print env.SHELL_ENV
    with cd(env.deploy_dir):
        with prefix("source /usr/local/bin/virtualenvwrapper.sh && workon venv"), shell_env(**env.SHELL_ENV):
            requirements_path = "/".join(["codalab", "requirements", "dev_azure_nix.txt"])
            pip_cmd = "pip install -r {0}".format(requirements_path)
            run(pip_cmd)
            # additional requirements for bundle service
            run("pip install SQLAlchemy simplejson")
            with cd("codalab"):
                run("python manage.py config_gen")
                run("mkdir -p ~/.codalab && cp ./config/generated/bundle_server_config.json ~/.codalab/config.json")
                run("python manage.py syncdb --migrate")
                run("python scripts/initialize.py")
                run("python manage.py collectstatic --noinput")
                # Link the generated nginx/supervisor configs into place.
                sudo("ln -sf `pwd`/config/generated/nginx.conf /etc/nginx/sites-enabled/codalab.conf")
                sudo("ln -sf `pwd`/config/generated/supervisor.conf /etc/supervisor/conf.d/codalab.conf")
def install_apache_spark():
    """Download Spark 2.1.1, unpack it under /opt, create the /opt/spark
    symlink, and export SPARK_HOME in the user's profile."""
    tarball = '/tmp/spark-2.1.1-bin-hadoop2.6.tgz'
    run("curl https://d3kbcqa49mib13.cloudfront.net/spark-2.1.1-bin-hadoop2.6.tgz -o " + tarball)
    sudo("tar -xf " + tarball + " -C /opt/")
    sudo("ln -s /opt/spark-2.1.1-bin-hadoop2.6 /opt/spark")
    run("echo 'export SPARK_HOME=/opt/spark' >> ${HOME}/.profile")
def debconf_set_selections(package, selections):
    """Given package and map config:(type,value), set selections"""
    # One "package key type value" line per selection, fed to
    # debconf-set-selections through a heredoc.
    lines = []
    for key, (dtype, value) in selections.iteritems():
        lines.append(' '.join([package, key, dtype, value]))
    sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format('\n'.join(lines)))
def setup_project():
    # Create the scraper's load/log directory tree owned by
    # alascrapy:alatest, install system packages, then fix ownership and
    # set up the browser drivers.
    sudo("mkdir -p /var/log/alaScrapy")
    sudo("chown alascrapy:alatest /var/log/alaScrapy")
    sudo("mkdir -p /var/local/load/running/")
    sudo("chown alascrapy:alatest /var/local/load/running/")
    sudo("mkdir -p /var/local/load/finished/")
    sudo("chown alascrapy:alatest /var/local/load/finished/")
    sudo("mkdir -p /var/local/load/amazon/")
    sudo("chown alascrapy:alatest /var/local/load/amazon/")
    sudo("mkdir -p /var/log/load/")
    sudo("chown alascrapy:alatest /var/log/load/")
    sudo("chown -R root:alatest /var/www/html")
    sudo("chmod -R g+w /var/www/html")
    # Mode 2774: setgid bit so new files inherit the group.
    setup_folder(env.install_root, mode="2774")
    #cleanser: python-amqplib
    sudo("""apt-get install python-setuptools \
        python-dev \
        python-mock \
        python-amqplib \
        libmysqlclient-dev \
        libxml2-dev \
        libxslt1-dev \
        xvfb \
        apache2 \
        libtiff5-dev \
        libjpeg8-dev \
        zlib1g-dev \
        libfreetype6-dev \
        liblcms2-dev \
        libgconf-2-4 \
        libwebp-dev \
        libxi6 \
        tcl8.6-dev \
        tk8.6-dev \
        python-tk \
        python-requests \
        chromium-browser=63.0.* \
        unzip""")
    sudo("chown -R %s:%s %s " % (USER, GROUP, env.install_root))
    setup_gecko_driver()
    setup_chrome_driver()
def restart_services():
    # Restart every supervisor-managed service on the host.
    sudo('supervisorctl restart all')
def create_marvin_engines_prefix():
    """Create the marvin engines install/log/run directories, each owned
    by the fabric user."""
    for directory in ('/opt/marvin/engines',
                      '/var/log/marvin/engines',
                      '/var/run/marvin/engines'):
        sudo("mkdir -p {dir}".format(dir=directory))
        sudo("chown {user}:{user} {dir}".format(user=env.user, dir=directory))
def install_marvin_engine_executor():
    """Create the engine-executor prefix and download its jar from S3."""
    # NOTE(review): the env attribute is spelled "margin_" upstream —
    # kept as-is since renaming would break whoever sets it.
    prefix = env.margin_engine_executor_prefix
    sudo("mkdir -p {prefix}".format(prefix=prefix))
    with cd(prefix):
        sudo("wget https://s3.amazonaws.com/marvin-engine-executor/{jar}"
             .format(jar=env.margin_engine_executor_jar))
def install_required_packages():
    """Refresh apt and install the build/runtime packages, then upgrade pip.

    Fix: `python-pip` was previously listed — and installed — twice.
    """
    sudo("apt-get update -y")
    # Installed one at a time, preserving the original order.
    packages = (
        "git", "wget", "python2.7-dev", "python-pip", "ipython",
        "libffi-dev", "libssl-dev", "libxml2-dev", "libxslt1-dev",
        "libpng12-dev", "libfreetype6-dev", "python-tk", "libsasl2-dev",
        "graphviz",
    )
    for package in packages:
        sudo("apt-get install -y {0}".format(package))
    sudo("pip install --upgrade pip")
def reload():
    """Reload the PostgreSQL service configuration.

    NOTE(review): the original docstring said "Starts or restarts
    nginx", but the command reloads postgresql — confirm which was
    intended before relying on this task.
    """
    with settings(hide("stderr"), sudo_prefix=SUDO_PREFIX):
        return sudo("service postgresql reload")
def get(self, remote_path, local_path, use_sudo, local_is_path, rremote=None, temp_dir=""): from fabric.api import sudo, hide # rremote => relative remote path, so get(/var/log) would result in # this function being called with # remote_path=/var/log/apache2/access.log and # rremote=apache2/access.log rremote = rremote if rremote is not None else remote_path # Handle format string interpolation (e.g. %(dirname)s) path_vars = { 'host': env.host_string.replace(':', '-'), 'basename': os.path.basename(rremote), 'dirname': os.path.dirname(rremote), 'path': rremote } if local_is_path: # Naive fix to issue #711 escaped_path = re.sub(r'(%[^()]*\w)', r'%\1', local_path) local_path = os.path.abspath(escaped_path % path_vars) # Ensure we give ssh.SFTPCLient a file by prepending and/or # creating local directories as appropriate. dirpath, filepath = os.path.split(local_path) if dirpath and not os.path.exists(dirpath): os.makedirs(dirpath) if os.path.isdir(local_path): local_path = os.path.join(local_path, path_vars['basename']) if output.running: print("[%s] download: %s <- %s" % (env.host_string, _format_local(local_path, local_is_path), remote_path)) # Warn about overwrites, but keep going if local_is_path and os.path.exists(local_path): msg = "Local file %s already exists and is being overwritten." warn(msg % local_path) # When using sudo, "bounce" the file through a guaranteed-unique file # path in the default remote CWD (which, typically, the login user will # have write permissions on) in order to sudo(cp) it. if use_sudo: target_path = remote_path hasher = hashlib.sha1() hasher.update(env.host_string) hasher.update(target_path) target_path = posixpath.join(temp_dir, hasher.hexdigest()) # Temporarily nuke 'cwd' so sudo() doesn't "cd" its mv command. # (The target path has already been cwd-ified elsewhere.) with settings(hide('everything'), cwd=""): sudo('cp -p "%s" "%s"' % (remote_path, target_path)) # The user should always own the copied file. 
sudo('chown %s "%s"' % (env.user, target_path)) # Only root and the user has the right to read the file sudo('chmod %o "%s"' % (0400, target_path)) remote_path = target_path try: # File-like objects: reset to file seek 0 (to ensure full overwrite) # and then use Paramiko's getfo() directly getter = self.ftp.get if not local_is_path: local_path.seek(0) getter = self.ftp.getfo getter(remote_path, local_path) finally: # try to remove the temporary file after the download if use_sudo: with settings(hide('everything'), cwd=""): sudo('rm -f "%s"' % remote_path) # Return local_path object for posterity. (If mutated, caller will want # to know.) return local_path
def install_oracle_jdk():
    """Install Oracle JDK 8 from the webupd8team PPA, pre-accepting the
    license so the installer does not prompt."""
    sudo("add-apt-repository ppa:webupd8team/java -y")
    sudo("apt-get -qq update")
    # Pre-seed debconf with both the 'select' and 'seen' license flags.
    for state in ("select", "seen"):
        run("echo debconf shared/accepted-oracle-license-v1-1 {0} true "
            "| sudo debconf-set-selections".format(state))
    sudo("apt-get install -y oracle-java8-installer")
def sdo(command):
    """Execute arbitrary commands with sudo"""
    # Thin pass-through to fabric's sudo(); the command string is run
    # verbatim as root on each target host.
    sudo(command)
def put(self, local_path, remote_path, use_sudo, mirror_local_mode, mode,
        local_is_path, temp_dir, callback=None):
    """Upload *local_path* to *remote_path* over SFTP, bouncing through a
    unique temp path and sudo(mv)-ing into place when *use_sudo* is set.
    Returns the final remote path."""
    from fabric.api import sudo, hide
    pre = self.ftp.getcwd()
    pre = pre if pre else ''
    # Uploading into a directory: append the local file's basename.
    if local_is_path and self.isdir(remote_path):
        basename = os.path.basename(local_path)
        remote_path = posixpath.join(remote_path, basename)
    if output.running:
        print("[%s] put: %s -> %s" % (
            env.host_string,
            _format_local(local_path, local_is_path),
            posixpath.join(pre, remote_path)
        ))
    # When using sudo, "bounce" the file through a guaranteed-unique file
    # path in the default remote CWD (which, typically, the login user will
    # have write permissions on) in order to sudo(mv) it later.
    if use_sudo:
        target_path = remote_path
        hasher = hashlib.sha1()
        hasher.update(env.host_string)
        hasher.update(target_path)
        remote_path = posixpath.join(temp_dir, hasher.hexdigest())
    # Read, ensuring we handle file-like objects correct re: seek pointer
    putter = self.ftp.put
    if not local_is_path:
        old_pointer = local_path.tell()
        local_path.seek(0)
        putter = self.ftp.putfo
    if callback is not None:
        real_callback = partial(callback, local_path)
    else:
        real_callback = None
    rattrs = putter(local_path, remote_path, callback=real_callback)
    # Restore the caller's seek position on file-like objects.
    if not local_is_path:
        local_path.seek(old_pointer)
    # Handle modes if necessary
    if (local_is_path and mirror_local_mode) or (mode is not None):
        lmode = os.stat(local_path).st_mode if mirror_local_mode else mode
        # Cast to octal integer in case of string
        if isinstance(lmode, basestring):
            lmode = int(lmode, 8)
        lmode = lmode & 07777
        rmode = rattrs.st_mode
        # Only bitshift if we actually got an rmode
        if rmode is not None:
            rmode = (rmode & 07777)
        if lmode != rmode:
            if use_sudo:
                # Temporarily nuke 'cwd' so sudo() doesn't "cd" its mv
                # command. (The target path has already been cwd-ified
                # elsewhere.)
                with settings(hide('everything'), cwd=""):
                    sudo('chmod %o \"%s\"' % (lmode, remote_path))
            else:
                self.ftp.chmod(remote_path, lmode)
    if use_sudo:
        # Temporarily nuke 'cwd' so sudo() doesn't "cd" its mv command.
        # (The target path has already been cwd-ified elsewhere.)
        with settings(hide('everything'), cwd=""):
            sudo("mv \"%s\" \"%s\"" % (remote_path, target_path))
        # Revert to original remote_path for return value's sake
        remote_path = target_path
    return remote_path
def enable_services(services):
    """Enable and immediately start each systemd service in *services*.

    Fix: dropped the redundant inner ``sudo`` from the command strings —
    fabric's ``sudo()`` already runs the command as root, so the
    previous ``sudo('sudo systemctl ...')`` elevated twice.
    """
    for service in services:
        sudo('systemctl enable {0}'.format(service))
        sudo('systemctl start {0}'.format(service))
def sudorun(command):
    """
    Run an arbitrary command as root on every target host.

    Usage:
        fab -H server1,server2 sudorun:"fdisk -l"
    """
    sudo(command)
def restart_nginx():
    """Restart nginx on host"""
    # Announce locally, then restart the service remotely.
    print(green('Restarting nginx on host'))
    sudo('service nginx restart')
def enable_tcp_ports(ports):
    """Open each TCP port in firewalld, both at runtime and permanently."""
    for port in ports:
        # Runtime rule takes effect immediately...
        sudo('firewall-cmd --add-port={0}/tcp'.format(port))
        # ...permanent rule survives reloads and reboots.
        sudo('firewall-cmd --permanent --add-port={0}/tcp'.format(port))
def _update_syslog(stackname):
    """Dry-run (test=True) the syslog-ng salt state on *stackname* and
    return the command result."""
    command = "salt-call state.sls_id syslog-ng-hook base.syslog-ng test=True"
    with stack_conn(stackname):
        return sudo(command)
def yum_command(command, packages, proxy=''):
    """Run ``yum -y <command> <packages>`` with http/ftp proxy variables
    exported into the remote shell."""
    package_args = ' '.join(packages)
    with shell_env(http_proxy=proxy, ftp_proxy=proxy):
        # pty=False keeps yum's output clean of terminal control codes.
        sudo('yum -y {0} {1}'.format(command, package_args), pty=False)
def bootstrap():
    # Upgrade system packages
    upgrade_packages()
    # Install basic package requirements
    sudo("apt-get install -y build-essential git-core subversion mercurial "
         "bison openssl libreadline6 libreadline6-dev "
         "curl zlib1g zlib1g-dev libssl-dev libyaml-dev "
         "libsqlite3-0 libsqlite3-dev sqlite3 libxml2-dev "
         "libxslt-dev autoconf libc6-dev ncurses-dev "
         "python-dev python-software-properties")
    # Add Postgresql and Nginx PPAs
    sudo("add-apt-repository ppa:pitti/postgresql")
    sudo("add-apt-repository ppa:nginx/stable")
    sudo("apt-get update")
    # System-Wide RVM install
    sudo("bash < <(curl -sk https://rvm.beginrescueend.com/install/rvm)")
    sudo("usermod -a -G rvm %s" % env.user)
    # Disconnect to have RVM properly load (group membership only takes
    # effect on a fresh login session).
    network.disconnect_all()
    # Install Ruby and Chef
    rvm_install('ruby-1.9.2-p290', True)
    run("gem install chef ohai --no-ri --no-rdoc")
def change_pwd_usermod_p(username, pwd_hash):
    """Set *username*'s password to the pre-hashed *pwd_hash* via
    ``usermod -p`` (the hash is stored as-is, no re-hashing)."""
    sudo("usermod -p '%s' '%s'" % (pwd_hash, username))
def copy_cert_key():
    # Copy the Let's Encrypt chain / fullchain / private key for this
    # host into the service's CA/cert/key locations.
    # NOTE(review): relies on `le_dir`, `kwargs` and the host_*_path
    # names from the enclosing scope — presumably a closure; confirm
    # against the caller.
    sudo('cp %s/live/%s/chain.pem %s' % (le_dir, kwargs.get('host'), host_ca_path))
    sudo('cp %s/live/%s/fullchain.pem %s' % (le_dir, kwargs.get('host'), host_cert_path))
    sudo('cp %s/live/%s/privkey.pem %s' % (le_dir, kwargs.get('host'), host_key_path))
def setup_transifex():
    """Install transifex client."""
    # Announce, pip-install the client system-wide, confirm.
    fastprint(blue('Setting up transifex command line client\n'))
    sudo('pip install transifex-client')
    fastprint(green('Setting up transifex command line client completed\n'))
def create_cert():
    # Request a certificate via certbot's certonly mode using the
    # configured config file.
    # NOTE(review): `cerbot` (sic) and `path` come from the enclosing
    # scope — confirm against the caller.
    sudo('%s --config %s certonly' % (cerbot, path))
def upgrade_packages():
    """Refresh the apt index, then upgrade all installed packages."""
    for command in ("apt-get update", "apt-get upgrade -y"):
        sudo(command)
def create_dir():
    # Ensure the target directory exists (idempotent via -p).
    # NOTE(review): `path` comes from the enclosing scope.
    sudo('mkdir -p %s' % path)
def copy_cert_key():
    # Install the web SSL material: write the pre-generated dhparam via
    # a shell heredoc, then copy the Let's Encrypt fullchain and private
    # key for this host.
    # NOTE(review): uses names from the enclosing scope (web_ssl_path,
    # dhparam_4096_body, le_dir, kwargs, host_cert_path, host_key_path).
    sudo('mkdir -p %s' % (web_ssl_path))
    sudo('cat <<EOF > %s/dhparam.pem\n%s\nEOF' % (web_ssl_path, dhparam_4096_body))
    sudo('cp %s/live/%s/fullchain.pem %s' % (le_dir, kwargs.get('host'), host_cert_path))
    sudo('cp %s/live/%s/privkey.pem %s' % (le_dir, kwargs.get('host'), host_key_path))
def change_config():
    # Substitute the real host for example.com in the Dovecot SSL config
    # backup, move it into place, and restart Dovecot.
    # NOTE(review): `host` and `imap_conf_path` come from the enclosing
    # scope.
    sudo("perl -i -pe 's/example.com/%s/g' %s/10-ssl.conf.backup" % (host, imap_conf_path))
    sudo("mv %s/10-ssl.conf.backup %s/10-ssl.conf" % (imap_conf_path, imap_conf_path))
    sudo("/etc/init.d/dovecot restart")
def executing():
    # Write config_body to config_path via a shell heredoc (both names
    # come from the enclosing scope).
    sudo('cat <<EOF > %s\n%s\nEOF' % (config_path, config_body))
def change_config():
    # Rewrite the nginx vhost template for the real host, rename it,
    # enable it via sites-enabled symlink, and restart nginx.
    # NOTE(review): `dur`, `host` and `nginx_conf_path` come from the
    # enclosing scope.
    sudo("perl -i -pe 's/%s.example.com/%s/g' %s/sites-available/%s.conf" % (dur, host, nginx_conf_path, dur))
    sudo("mv %s/sites-available/%s.conf %s/sites-available/%s.conf" % (nginx_conf_path, dur, nginx_conf_path, host))
    sudo("ln -s %s/sites-available/%s.conf %s/sites-enabled/%s.conf" % (nginx_conf_path, host, nginx_conf_path, host))
    sudo("/etc/init.d/nginx restart")
def change_config():
    # Point exim4's TLS options at the real host, move the edited backup
    # into place, and restart exim.
    # NOTE(review): `host` and `smtp_conf_path` come from the enclosing
    # scope.
    sudo("perl -i -pe 's/example.com/%s/g' %s.bkp/main/03_exim4-config_tlsoptions" % (host, smtp_conf_path))
    sudo("mv %s.bkp/main/03_exim4-config_tlsoptions %s/main/03_exim4-config_tlsoptions" % (smtp_conf_path, smtp_conf_path))
    sudo("/etc/init.d/exim4 restart")