def deploy():
    """Update the chosen environment: pull code, optionally upgrade
    packages and migrate, then rebuild CSS and collect static files."""
    print(red(" * updating code..."))
    run('cd %s && git pull' % env.path)
    if "y" == prompt(blue('Update packages (y/n)?'), default="y"):
        print(red(" * updating packages..."))
        # Upgrade both the shared and the server-only requirements inside the venv.
        run('cd %s '
            '&& source venv/bin/activate '
            '&& pip install -r requirements.txt --upgrade '
            '&& pip install -r server_requirements.txt --upgrade' % env.path)
    if "y" == prompt(blue('Migrate database schema (y/n)?'), default="y"):
        print(red(" * migrating database schema..."))
        run('cd %s '
            '&& source venv/bin/activate '
            '&& python manage.py migrate' % env.path)
    print(red(" * compiling less files..."))
    # NOTE(review): this step uses env.settings_path while every other step
    # uses env.path -- confirm this is intentional.
    run('cd %s '
        '&& lessc --clean-css static/less/style.less static/css/style.min.css '
        '&& lessc --clean-css static/less/partners.less static/css/partners.min.css '
        '&& lessc --clean-css static/less/home.less static/css/home.min.css '
        '&& lessc --clean-css static/less/sanitair.less static/css/sanitair.min.css' % env.settings_path)
    print(red(" * collecting static files..."))
    run('cd %s '
        '&& source venv/bin/activate '
        '&& python manage.py collectstatic --noinput' % env.path)
def commit(msg=None): """ Commit your changes to git :msg: @todo :returns: @todo """ print '---Commiting---' print msg = msg or prompt('Commit message: ') commit = False commit = prompt('Confirm commit? [y/n]') == 'y' if commit: with settings(warn_only=True): _commit = not local('git commit -a -m "%s"' % msg).failed if not _commit: #nothing was committed commit = False print "Nothing to commit" else: abort('commit aborted') print print '---Done---' return commit
def deploy():
    """Deploy the mob and stubs sites: pick/create a git tag to build
    from, upload a tarball, install, symlink the release and restart
    the webserver."""
    require('hosts')
    require('path')
    print(green("\n#####Beginning deployment to %s & %s on %s\n" % (env.mob_domain, env.stubs_domain, env.hosts[0])))
    env.mob_release_label = None
    env.stubs_release_label = None
    if env.is_test:
        # Test environments may mint a brand-new tag on the fly.
        create_tag = prompt('Tag this release? [y/N] ')
        if create_tag.lower() == 'y':
            print("\nShowing latest tags for reference\n")
            local('git tag | tail -5')
            env.refspec = prompt('Enter tag name [in format VX_X_X]? ')
            # Can't use .'s as separators as they cause import problems on the server
            local('git tag %s -am "Tagging version %s in fabfile"' % (env.refspec, env.refspec))
            local('git push --tags')
            env.mob_release_label = mob_package_name + '-' + env.refspec
            env.stubs_release_label = stubs_package_name + '-' + env.refspec
    if not env.mob_release_label:
        # An existing tag must be specified
        local('git tag | tail -5')
        env.refspec = prompt('Choose tag to build from: ')
        # grep doubles as validation: it fails loudly when the tag is unknown.
        local('git tag | grep "%s"' % env.refspec)
        env.mob_release_label = mob_package_name + '-' + env.refspec
        env.stubs_release_label = stubs_package_name + '-' + env.refspec
    # import time
    # env.release_label = package_name + '-' + time.strftime('%Y%m%d%H%M%S')
    _upload_tar_from_git()
    _install_site()
    _symlink_current_release()
    restart_webserver()
    print(green("\n#####Deployment successful for %s & %s\n" % (env.mob_domain, env.stubs_domain)))
def generate(from_url=None, to_url=None, version=None, output=None):
    '''Generate a deployment package: a tar.gz containing the html tree,
    a migrated database dump, a CHANGELOG, optional tests and a VERSION
    file.

    :param from_url: site url to migrate the database from (prompted if omitted)
    :param to_url: target site url (defaults to env.site_url)
    :param version: version label (prompted if omitted)
    :param output: output directory for the tarball (defaults to '.')
    '''
    require('exclude')
    # create tmp directory
    tmpd = tempfile.mkdtemp()
    # create html directory
    local('mkdir %s/html'%tmpd)
    # create db directory
    local('mkdir %s/db'%tmpd)
    # copy files to this directory (honouring the configured excludes)
    local('rsync -av html/ %s/html/ --exclude=%s'%(tmpd, ' --exclude='.join(compile_excludes())))
    # create database migration script
    if not from_url:
        from_url = prompt('Url to migrate from?')
    if not to_url:
        to_url = env.site_url
    migration_dump = os.path.join(tmpd,'db','migration.sql')
    mysql.migrate(from_url, to_url, output=migration_dump)
    # migrate the database: replay the previous dump plus the migration
    # into a scratch database, then re-dump the result.
    tmp_db = mysql.create_tmp_db()
    local('mysql %s < db/last'%tmp_db)
    local('mysql %s < %s'%(tmp_db, migration_dump))
    local('rm %s'%migration_dump)
    # dump migrated database
    local('mysqldump --default-character-set=utf8 %s > %s/db/last'%(tmp_db, tmpd))
    # create CHANGELOG
    # TODO: generate change log from PivotalTracker stories
    changelog = "CHANGELOG generation will be included in final version of the WordPress Package Generator"
    local('echo "%s" > %s'%(changelog, os.path.join(tmpd, 'CHANGELOG')))
    # copy TESTS (only when the project ships one)
    if os.path.exists('tests.txt'):
        local('cp tests.txt %s'%os.path.join(tmpd, 'tests.txt'))
    # write version file
    if not version:
        version = prompt('What should I call this version?')
    local('echo %s > %s'%(version, os.path.join(tmpd, 'VERSION')))
    # create tar.gz
    if not output:
        output = '.'
    tarball = "%s-%s-%s.tar.gz"%(env.project, env.name, version)
    output = os.path.join(output, tarball)
    local('tar -C %s -pczf %s .'%(tmpd, output))
def install():
    """Install the service on a Raspberry Pi: write the upstart job,
    clone the repository, install dependencies and start the service."""
    api.require('hosts', provided_by=[raspberry_pi])
    # Idempotence guard: bail out when the upstart job is already present.
    if exists(UPSTART_FILE_PATH, use_sudo=True):
        print('"{0}" is already installed, use "update" to deploy changes'.format(UPSTART_SERVICE_NAME))
        return
    upstart_values = {}
    # AWS credentials are interpolated into the upstart job template.
    upstart_values['aws_access_key_id'] = api.prompt('AWS_ACCESS_KEY_ID:')
    upstart_values['aws_secret_access_key'] = api.prompt('AWS_SECRET_ACCESS_KEY:')
    upstart_values['local_repo_name'] = LOCAL_REPO_NAME
    upstart_file = StringIO(UPSTART_TEMPLATE.format(**upstart_values))
    # NOTE(review): the echo presumably answers apt's "Yes, do as I say!"
    # confirmation when upstart replaces essential packages -- verify.
    api.sudo('echo Yes, do as I say! | apt-get -y --force-yes install upstart')
    with api.cd(UPSTART_DIRECTORY):
        upload = api.put(upstart_file, UPSTART_FILE_NAME, use_sudo=True)
        assert upload.succeeded
    with api.cd(LOCAL_INSTALL_DIRECTORY):
        api.sudo('git clone {0} {1}'.format(REMOTE_REPO, LOCAL_REPO_NAME))
    with api.cd(LOCAL_REPO_PATH):
        api.sudo(INSTALL_DEPENDENCIES)
    api.sudo(START_SERVICE)
def get_seed_data(self):
    """
    Ensure that the seed node has a recent latest restorable time,
    otherwise allow the user to create a snapshot to restore from.
    """
    if self.seed_node and self.seed_verification:
        # TODO: Hook to do maintenance announcements, etc
        self._create_snapshot()
        return
    if self.seed_node:
        restoration_lag = self._get_restorable_lag()
        if restoration_lag > MAX_RESTORABLE_LAG:
            logger.critical("DB restoration lag: %s", restoration_lag)
            logger.critical("DB restoration lag too high.")
        else:
            # Lag is within bounds -- nothing to do.
            return
    # Lag too high (or no seed node): let the operator choose how to proceed.
    opts = ['F', 'S', 'E']
    action = None
    while action not in opts:
        # NOTE(review): the answer is case-sensitive; lowercase input re-prompts.
        action = prompt(
            "Create (S)napshot, use (E)xisting snapshot or (F)ail?")
    if action == 'S':
        self._create_snapshot()
    elif action == 'E':
        self.seed_snapshot_id = prompt("Enter snapshot id:")
    else:
        logger.critical("FAIL. DB restoration lag too high")
        exit(1)
def postgresql():
    """PostgreSQL 8.4 + PostGIS 1.5"""
    with settings(show("user"), hide("warnings", "running", "stdout", "stderr")):
        project()
        # Ask for the postgres password once and cache it on env.
        if "pgpass" not in env.keys():
            prompt("Passe PostgreSQL :", default=pgpass, key="pgpass")
        print(yellow("Configuration PostgreSQL+PostGIS..."))
        pretty_apt(
            [
                "libpq-dev",
                "binutils",
                "gdal-bin",
                "libproj-dev",
                "postgresql-8.4-postgis",
                "postgresql-server-dev-8.4",
                "python-psycopg2",
            ]
        )
        fabtools.deb.upgrade()
        # Create a postgresql role matching the unix user name.
        # (original comment was French: "création d'un utilisateur
        # postgresql avec le meme nom d'utilisateur")
        if not fabtools.postgres.user_exists(env.user):
            fabtools.postgres.create_user(env.user, env.pgpass)
            # NOTE(review): the sudo user argument is censored ("******")
            # in this source; preserved verbatim.
            sudo('''psql -c "ALTER ROLE %(user)s CREATEDB;"''' % env, user="******")
            sudo('''psql -c "ALTER USER %(user)s with SUPERUSER;"''' % env, user="******")
            print(green('Création d’un superuser "%(user)s" PostgreSQL.' % env))
        # Write a ~/.pgpass so psql does not prompt for the password.
        if not exists(".pgpass"):
            run('echo "*:*:*:%(user)s:%(pgpass)s" >> .pgpass' % env)
            sudo("chmod 0600 .pgpass")
            print(green("Création du fichier .pgpass."))
        postgis_template()
        postgresql_net_access()
        icanhaz.postgres.server()  # start server
def deploy():
    """Update the chosen environment: sync git both ways, optionally
    upgrade packages and migrate, rebuild CSS (non-debug only) and
    collect static files."""
    print(red(" * checking remote status..."))
    run("cd %s " "&& git status" % env.path)
    if "y" == prompt(blue("Commit and push changes (y/n)?"), default="y"):
        # Push any remote-side edits (e.g. translations) back first so the
        # subsequent pull does not conflict.
        print(red(" * commiting and pushing provise code..."))
        run("cd %s " '&& git commit -am "translations" ' "&& git pull " "&& git push" % env.path)
        print(red(" * pushing commits..."))
        local("git pull " "&& git push")
    print(red(" * updating code..."))
    run("cd %s && git pull" % env.path)
    if "y" == prompt(blue("Update packages (y/n)?"), default="y"):
        print(red(" * updating packages..."))
        run(
            "cd %s "
            "&& source venv/bin/activate "
            "&& pip install -r requirements.txt --upgrade "
            "&& pip install -r server_requirements.txt --upgrade" % env.path
        )
    if "y" == prompt(blue("Migrate database schema (y/n)?"), default="y"):
        print(red(" * migrating database schema..."))
        run("cd %s " "&& source venv/bin/activate " "&& python manage.py migrate" % env.path)
    if not env.debug:
        # Production-style environments serve minified CSS.
        print(red(" * compiling less files..."))
        run("cd %s " "&& lessc --clean-css website/static/less/style.less website/static/css/style.min.css" % env.path)
    print(red(" * collecting static files..."))
    run("cd %s " "&& source venv/bin/activate " "&& python manage.py collectstatic --noinput" % env.path)
def _get_svn_user_and_pass():
    """Prompt for SVN credentials and cache them on the fabric env,
    skipping values that are already set and non-empty."""
    if not env.has_key('svnuser') or len(env.svnuser) == 0:
        # prompt user for username
        # NOTE(review): this statement is garbled/censored ("******") in
        # the source; preserved verbatim.
        prompt('Enter SVN username:'******'svnuser')
    if not env.has_key('svnpass') or len(env.svnpass) == 0:
        # prompt user for password (getpass keeps it off the terminal echo)
        env.svnpass = getpass.getpass('Enter SVN password:')
def setup_passwords():
    """Create the project's .env file and MySQL user.

    To be called from `setup` or `local_setup`. Asks for the database
    and email passwords; when only one is given it is reused for both.
    """
    print('I will now ask for the passwords to use for database and email account access. If one is empty, I’ll use the non-empty for both. If you leave both empty, I won’t create an database user.')
    prompt('Please enter DATABASE_PASSWORD for user %(prj_name)s:' % env, key='database_password')
    prompt('Please enter EMAIL_PASSWORD for user %(user)s:' % env, key='email_password')
    # Reuse the single supplied password for both purposes.
    if env.database_password and not env.email_password:
        env.email_password = env.database_password
    if env.email_password and not env.database_password:
        env.database_password = env.email_password
    # TODO: check input for need of quoting!
    with settings(user=env.adminuser, pty=True):
        # create .env and set database and email passwords (only if absent)
        run('echo; if [ ! -f %(prj_path)s/.env ]; then echo "DJANGO_SETTINGS_MODULE=settings\nDATABASE_PASSWORD=%(database_password)s\nEMAIL_PASSWORD=%(email_password)s\n" > %(prj_path)s/.env; fi' % env)
        # create MySQL user
        if env.dbserver == 'mysql' and env.database_password:
            env.dbuserscript = '%(homepath)s/userscript.sql' % env
            run('''echo "\ncreate user '%(prj_name)s'@'localhost' identified by '%(database_password)s'; create database %(prj_name)s character set 'utf8';\n grant all privileges on %(prj_name)s.* to '%(prj_name)s'@'localhost';\n flush privileges;\n" > %(dbuserscript)s''' % env)
            # Fixed: the original print lacked "% env" and emitted the
            # literal text "%(prj_name)s" instead of the project name.
            print('Setting up %(prj_name)s in MySQL. Please enter password for MySQL root:' % env)
            run('mysql -u root -p -D mysql < %(dbuserscript)s' % env)
            run('rm %(dbuserscript)s' % env)
    # TODO: add setup for PostgreSQL
    setup_paths()
def upload_latest():
    """Push the current code to the server with no follow-up steps.

    Use sparingly during setup: files on the server change, but the
    database is not migrated and the server is not restarted -- e.g.
    run this before load_data or load_packages.
    """
    stamp = time.strftime('%Y%m%d%H%M%S')
    env.release = stamp
    prompt('Git branch:', 'git_branch', default='master')
    _upload_archive_from_git()
    _symlink_current_release()
    _cleanup()
def install_django():
    """Install django (plus south, db driver and fabric) into the
    virtualenv, create a project from an optional template, then create
    the project's app."""
    with lcd(env.virtualenv_path):
        local("%s/bin/pip install django" % env.virtualenv_path)
        #if env.db_type!='sqlite3':
        local("%s/bin/pip install south" % env.virtualenv_path)
        # Pick the database driver matching the configured backend.
        if env.db_type=="mysql":
            local("%s/bin/pip install mysql-python" % env.virtualenv_path)
        if env.db_type=="postgresql_psycopg2":
            local("%s/bin/pip install psycopg2" % env.virtualenv_path)
        if env.db_type=="oracle":
            local("%s/bin/pip install cx_oracle" % env.virtualenv_path)
        local("%s/bin/pip install fabric" % env.virtualenv_path)
        # The answer is stored on env.proj_template and mapped below to the
        # django-admin startproject template arguments.
        prompt("Project Template?: \n 1)1.6 Base\n 2)BadA$$\n 3)TwoScoops\n", key='proj_template', default="0")
        if env.proj_template == '1':
            # django 1.6 Base-Template
            env.proj_template="--template https://github.com/xenith/django-base-template/zipball/master --extension py,md,rst "
        elif env.proj_template == '2':
            # BadA$$-Template
            env.proj_template="--template https://github.com/bruth/badass-django-template/zipball/master -e py,ini,gitignore,in,conf,md,sample,json -n Gruntfile.coffee "
        elif env.proj_template == '3':
            # TwoScoops-Template
            env.proj_template="--template=https://github.com/twoscoops/django-twoscoops-project/archive/master.zip --extension=py,rst,html "
        else:
            # Plain startproject, no template.
            env.proj_template=""
        local("%s/bin/python %s/bin/django-admin.py startproject %s %s" % (env.virtualenv_path, env.virtualenv_path, env.proj_template, env.project_name))
    # create the projects app
    with lcd(env.django_path):
        local("%s/bin/python ../manage.py startapp %s" % (env.virtualenv_path, env.app_name))
def send_meeting():
    """Email the minutes for a meeting date (meetings/<date>.txt) after
    showing a preview and asking for confirmation."""
    date = prompt('Date:', validate=r'(\d{4})-(\d{2})')
    subject = prompt('Subject:')
    # NOTE(review): the addresses are censored in this source.
    email_from = '*****@*****.**'
    email_to = '*****@*****.**'
    filepath = 'meetings/{0}.txt'.format(date)
    # Make sure we are sending the latest committed minutes.
    local('git pull')
    if not os.path.isfile(filepath):
        return abort('There is no meeting at that date.')
    with open(filepath, 'r') as f:
        msg = MIMEText(f.read())
    msg['Subject'] = subject
    msg['From'] = email_from
    msg['To'] = email_to
    preview = "{0}".format(msg.as_string())
    fastprint(preview)
    send = prompt('Everything look okay?', validate=r'(yes|no)')
    if send == 'yes':
        # Deliver through the local SMTP daemon.
        s = smtplib.SMTP('127.0.0.1')
        s.sendmail(email_from, [email_to], msg.as_string())
        s.quit()
def create_project(): """ Создает новый проект """ # спрашиваем у пользователя название папки с проектом prompt('project root name: ', 'project_root', validate='^([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$') # спрашиваем у пользователя название проекта prompt('project name: ', 'project', validate='^([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$') puts('create project: {0}'.format(env.project)) with lcd(PROJECTS_ROOT): # Создаем директорию с root проектом local('mkdir %s' % env.project_root) with lcd(env.project_root): # Создаем директорию с django проектом и переходим в нее local('mkdir {0}-django'.format(env.project)) with lcd('{0}-django'.format(env.project)): # Создаем виртуальное окружение local('virtualenv -p {0} .venv'.format(PYTHON)) with prefix('.venv/bin/activate'): # Устанавливаем django if DJANGO_VERSION: local('pip install django=={0}'.format(DJANGO_VERSION)) else: local('pip install django') # Создаем проект из шаблона local('django-admin --template={0} {1}'.format(PROJECT_TEMPLATE_PATH, env.project))
def deploy():
    """Rsync the working tree to the remote host, then offer optional
    follow-up steps: pip install, collectstatic, migrate and a uwsgi
    restart."""
    require('root', provided_by='exist_http')
    rsync_project(
        remote_dir=env.root,
        exclude=RSYNC_EXCLUDE,
        delete=True,
        extra_opts='--omit-dir-times',
    )
    # Ask every question up front, then run the confirmed steps in order.
    followups = [
        (prompt('Run pip install -r requirements.txt? (y/N) '), pip_install_requirements),
        (prompt('Collectstatic? (y/N) '), collectstatic),
        (prompt('Run manage.py migrate? (y/N) '), run_migrations),
        (prompt('Restart uwsgi? (y/N) '), uwsgi_supervisord_restart),
    ]
    for answer, step in followups:
        if answer == 'y':
            step()
def create_database_and_dotenv():
    """Provision the PostgreSQL role and database for this box, write the
    project's .env file and run the initial migrations."""
    # Secrets supplied by the operator.
    env.box_sentry_dsn = prompt('Sentry DSN')
    env.box_oauth2_client_id = prompt('Google OAuth2 Client ID')
    env.box_oauth2_client_secret = prompt('Google OAuth2 Client Secret')
    # Generated credentials.
    env.box_database_pw = get_random_string(
        20, chars='abcdefghijklmopqrstuvwx01234567890')
    env.box_secret_key = get_random_string(50)
    # NOTE(review): the %(...)s placeholders in these psql commands are not
    # interpolated here -- presumably fabric/env interpolation happens in
    # the run() wrapper; confirm.
    run('psql -c "CREATE ROLE %(box_database)s WITH'
        ' ENCRYPTED PASSWORD \'%(box_database_pw)s\''
        ' LOGIN NOCREATEDB NOCREATEROLE NOSUPERUSER"')
    run('psql -c "GRANT %(box_database)s TO admin"')
    run('psql -c "CREATE DATABASE %(box_database)s WITH'
        ' OWNER %(box_database)s'
        ' TEMPLATE template0'
        ' ENCODING \'UTF8\'"')
    with cd('%(box_domain)s'):
        # Render and upload the .env file for this environment.
        put(StringIO('''\
DJANGO_SETTINGS_MODULE=%(box_project_name)s.settings.%(box_environment)s
DATABASE_URL=postgres://%(box_database)s:%(box_database_pw)s\
@localhost:5432/%(box_database)s
CACHE_URL=hiredis://localhost:6379/1/?key_prefix=%(box_database)s
SECRET_KEY=%(box_secret_key)s
SENTRY_DSN=%(box_sentry_dsn)s
DJANGO_ADMIN_SSO_OAUTH_CLIENT_ID=%(box_oauth2_client_id)s
DJANGO_ADMIN_SSO_OAUTH_CLIENT_SECRET=%(box_oauth2_client_secret)s
ALLOWED_HOSTS=['%(box_domain)s', '.%(box_domain)s', '.%(host_string_host)s']
# FORCE_DOMAIN = www.%(box_domain)s
''' % dict(env, host_string_host=env.host_string.split('@')[-1])), '.env')
        run('venv/bin/python manage.py migrate --noinput')
def full():
    """Provision a complete server end to end: system setup, application
    user, database creation, config, code deploy and an initial restore."""
    require('PROJECT')
    from fabfile import db, config, deploy
    revision = prompt('Code revision: ', default='HEAD')
    initial_data = prompt('Path to dbdump.sql.bz2: ', default='tmp/ecofunds_with_centroid_290713.sql.bz2')
    # NOTE(review): the default email is censored in this source.
    mail = prompt('Server email: ', default='*****@*****.**')
    fqdn = env.host
    hostname = env.PROJECT.instance
    dbname = env.PROJECT.project
    dbuser = env.PROJECT.project
    # root
    server(hostname, fqdn, mail)
    # sysadmin
    application()
    db_url = db.mysql.create(dbname, dbuser)
    # appuser
    config.set('DATABASE_URL', db_url)
    deploy.rsync_media(upload=True)
    deploy.deploy(revision)
    db.mysql.restore(env.PROJECT.project, initial_data)
def install():
    """Install the forecast-rpc service: write its upstart job, clone the
    repository, install node dependencies and start the service."""
    api.require('hosts', provided_by=[local])
    # Idempotence guard.
    if exists('/etc/init/forecast-rpc.conf', use_sudo=True):
        print('"forecast-rpc" is already installed, use the "update" task for changes')
        return
    upstart_values = {}
    upstart_values['home_directory'] = '/home/{0}'.format(api.env.user)
    # Service credentials interpolated into the upstart job template.
    upstart_values['loggly_token'] = api.prompt("Loggly token:")
    upstart_values['loggly_domain'] = api.prompt("Loggly domain:")
    upstart_values['forecast_key'] = api.prompt("Forecast.io key:")
    upstart_values['rabbit_url'] = api.prompt("Rabbit URL:")
    upstart_file = StringIO(UPSTART_TEMPLATE.format(**upstart_values))
    with api.cd('/etc/init'):
        upload = api.put(upstart_file, 'forecast-rpc.conf', use_sudo=True)
        assert upload.succeeded
    api.run('git clone https://github.com/projectweekend/Forecast.io-RPC-Service.git')
    with api.cd('~/Forecast.io-RPC-Service/app'):
        api.run('npm install')
    api.sudo('service forecast-rpc start')
def upload_to_github(): """ Upload release to Github. """ # We need github API v3 but no python lib exists yet. So do it manually. import os import urllib2 import base64 import simplejson import getpass # Setup Auth url = 'https://api.github.com/repos/waylan/Python-Markdown/downloads' user = prompt('Github username:'******'Github password:'******'%s:%s' % (user, password)) # Loop through files and upload base = 'dist/' for file in os.listdir(base): file = os.path.join(base, file) if os.path.isfile(file): ans = prompt('Upload: %s' % file, default='Y') if ans.lower() == 'y': # Create document entry on github desc = prompt('Description for %s:' % file) data1 = simplejson.dumps({ 'name': os.path.basename(file), 'size': os.path.getsize(file), 'description' : desc, #'content_type': 'text/plain' # <- let github determine }) req = urllib2.Request(url, data1, {'Content-type': 'application/json'}) req.add_header('Authorization', 'Basic %s' % authstring) try: response = urllib2.urlopen(req) except urllib2.HTTPError, e: error = simplejson.loads(e.read()) if error['errors'][0]['code'] == 'already_exists': print 'Already_exists, skipping...' continue else: print e.read() raise data2 = simplejson.loads(response.read()) response.close() # Upload document (using curl because it is easier) data2['file'] = file curl = """curl \\ -F "key=%(path)s" \\ -F "acl=%(acl)s" \\ -F "success_action_status=201" \\ -F "Filename=%(name)s" \\ -F "AWSAccessKeyId=%(accesskeyid)s" \\ -F "Policy=%(policy)s" \\ -F "Signature=%(signature)s" \\ -F "Content-Type=%(mime_type)s" \\ -F "file=@%(file)s" \\ %(s3_url)s""" % data2 print 'Uploading...' local(curl) else: print 'Skipping...'
def install():
    """Install the system-rpc service on a Raspberry Pi: write its
    upstart job, clone the repository, install node dependencies and
    start the service."""
    api.require('hosts', provided_by=[raspberry_pi])
    # Idempotence guard.
    if exists('/etc/init/system-rpc.conf', use_sudo=True):
        print('"system-rpc" is already installed, use the "update" task for changes')
        return
    upstart_values = {}
    # Service credentials interpolated into the upstart job template.
    upstart_values['loggly_token'] = api.prompt('Loggly token:')
    upstart_values['loggly_domain'] = api.prompt('Loggly domain:')
    upstart_values['rabbit_url'] = api.prompt('Rabbit URL:')
    upstart_file = StringIO(UPSTART_TEMPLATE.format(**upstart_values))
    # NOTE(review): the echo presumably answers apt's "Yes, do as I say!"
    # confirmation when upstart replaces essential packages -- verify.
    api.sudo('echo Yes, do as I say! | apt-get -y --force-yes install upstart')
    with api.cd('/etc/init'):
        upload = api.put(upstart_file, 'system-rpc.conf', use_sudo=True)
        assert upload.succeeded
    api.run('git clone https://github.com/projectweekend/Pi-System-RPC-Service.git')
    with api.cd('~/Pi-System-RPC-Service/app'):
        api.run('npm install')
    api.sudo('service system-rpc start')
def setup_rabbitmq(): """ Setup and configure RabbitMQ """ # add official repository to sources.list sudo('echo \'deb http://www.rabbitmq.com/debian/ testing main \' '\ '> /etc/apt/sources.list.d/rabbitmq.list') with cd('/tmp'): sudo('wget http://www.rabbitmq.com/rabbitmq-signing-key-public.asc') sudo('apt-key add rabbitmq-signing-key-public.asc') sudo('rm rabbitmq-signing-key-public.asc') sudo('apt-get update') sudo('apt-get -y install rabbitmq-server') # Create new superuser, to replace old one (guest) login = prompt("RabbitMQ Administrator login: "******"RabbitMQ Administrator password: "******".*" ".*" ".*"') sudo('rabbitmqctl delete_user guest') # ulimit -n 1024 # Turn on web console(management plugin) sudo('rabbitmq-plugins enable rabbitmq_management') sudo('/etc/init.d/rabbitmq-server restart')
def upload_file(): print "Checking remote disk space..." run("df -h") local_path = prompt("Enter the local file path:") remote_path = prompt("Enter the remote file path:") put(remote_path=remote_path, local_path=local_path) run("ls %s" %remote_path)
def interactive(_user="******"):
    """Interactively choose a deployment script, collect its template
    variables, confirm, render it and execute it via sudo.

    NOTE(review): the default for _user is censored ("******") in this
    source.
    """
    scripts = get_scripts()
    print green("scripts list:\n")
    for i, s in scripts:
        print "\t[%s]\t%s" % (green(i), s)
    _name = prompt("please select a script:", validate=partial(scripts_validate, scripts))
    kwargs = dict()
    variables = get_variables(_name)
    if len(variables) > 0:
        # Collect a value for every template variable, falling back to the
        # declared default when one exists.
        print red("\nsome variables need...\n")
        for key_name, default in variables:
            if default:
                kwargs[key_name] = prompt("%s:" % key_name, default=default, validate=notnull_validate)
            else:
                kwargs[key_name] = prompt("%s:" % key_name, validate=notnull_validate)
        if not confirm("you'll run %s with:\n\n\t %s \n\n are you sure? " % \
                (_name, "\n\t".join(["%s=>%s" % (k, v) for k, v in kwargs.items()]))):
            abort("user cancel")
    else:
        if not confirm("you'll run %s, are you sure? " % _name):
            abort("user cancel")
    # Render the chosen script with the collected variables.
    s = render(_name, **kwargs)
    if not env.DEBUG:
        # Hide the (potentially long) rendered command from fabric output.
        with settings(hide("running"), sudo_user=_user):
            sudo(s)
    else:
        with settings(sudo_user=_user):
            sudo(s)
def syncdb():
    """Syncs the database with the local one"""
    # Fetch and unpack a fresh dump of the remote database.
    dumpdb()
    local("tar xvfz %(db_name)s-%(time)s.sql.tgz" % env)
    # get values from local_settings or prompt if empty
    settings.DATABASE_USER = settings.DATABASES['default']['USER']
    settings.DATABASE_PASSWORD = settings.DATABASES['default']['PASSWORD']
    settings.DATABASE_NAME = settings.DATABASES['default']['NAME']
    if settings.DATABASE_USER:
        env.local_db_user = settings.DATABASES['default']['USER']
    else:
        # prompt for details
        # NOTE(review): this statement is garbled/censored ("******") in
        # the source (it originally prompted for user, password and name);
        # preserved verbatim.
        env.local_db_user = prompt("Database User:"******"Database password:"******"Database name:")
    env.local_connection = "mysql -u%(local_db_user)s -p%(local_db_password)s %(local_db_name)s" % env
    # drop existing database and recreate it empty
    local("%(local_connection)s -e \"drop database %(local_db_name)s; "
          "create database %(local_db_name)s;\" " % env)
    # import database
    local("%(local_connection)s < %(db_name)s-%(time)s.sql" % env)
    # clean up
    local("rm %(db_name)s-%(time)s.sql.tgz" % env)
    local("rm %(db_name)s-%(time)s.sql" % env)
def determine_refspec_to_deploy_from(is_test=False):
    """Work out which git refspec (tag or commit) to build from.

    Test deploys may mint a new tag, name an explicit commit, or fall
    back to ``git describe`` on the current branch. Non-test deploys
    must pick an existing tag, which is validated against ``git tag``.
    """
    local("git fetch --tags")
    if not is_test:
        # An existing tag must be specified
        local("git tag | sort -V | tail -5")
        chosen = prompt("Choose tag to build from: ")
        # Check this is valid
        local('git tag | grep "%s"' % chosen)
        return chosen
    if prompt("Tag this release? [y/N] ").lower() == "y":
        print("Showing latest tags for reference")
        local("git tag | sort -V | tail -5")
        chosen = prompt("Tag name [in format x.x.x]? ")
        local('git tag {0} -m "Tagging version {0} in fabfile"'.format(chosen))
        local("git push --tags")
        return chosen
    if prompt("Build from a specific commit? [y/N] ").lower() == "y":
        return prompt("Choose commit to build from: ")
    # Default: describe the tip of the current branch.
    current = local('git branch | grep "^*" | cut -d" " -f2', capture=True)
    return local("git describe %s" % current, capture=True).strip()
def new_release(): ans = local("git status --porcelain") if ans != "": print "Please commit your code and run again." return ans = prompt("Do you want to increment the version?", default="no", validate=r"(yes|no)") if ans == "yes": update_version() version = get_file_version() local("git add HISTORY.rst") readme() local("git add oopen/__init__.py") local("git add README.rst") local('git commit -m "Incrementing version to {}"'.format(version)) ans = prompt("Do you want to tag this version?", default="no", validate=r"(yes|no)") if ans == "yes": local("git tag -s {}".format(version)) ans = prompt("Do you want to push to github?", default="no", validate=r"(yes|no)") if ans == "yes": push() ans = prompt("Do you want to publish to PYPI?", default="no", validate=r"(yes|no)") if ans == "yes": publish()
def setup_ssh():
    """
    Ensures the correct private and public keys are on local machine
    before adding public part to authorized_keys on remote host
    """
    # Generate a local keypair on first run.
    if not exists('~/.ssh/id_rsa'):
        run('ssh-keygen')
    pub_key = get_contents('~/.ssh/id_rsa.pub')
    # Pause so the operator can register the key with github first.
    prompt('You will need to add the public key to your github repo:' +
           '\n\n{}\n\n> Ok, got it thanks! (press enter to continue)'.format(pub_key))
    # Harden sshd with our prepared config.
    put(local_path='./deploy/templates/sshd_config.txt',
        remote_path='/etc/ssh/sshd_config', use_sudo=True)
    key_name = 'nhs_shev'
    pem_key = expanduser('~/.ssh/{}.pem'.format(key_name))
    pub_key = expanduser('~/.ssh/{}.pub'.format(key_name))
    ssh = '{}/.ssh'.format(USER_DIR)
    ensure_dir(ssh, USERNAME)
    # Install the deploy key's public half on the remote host.
    authorized_keys = '{}/.ssh/authorized_keys'.format(USER_DIR)
    put(local_path=pub_key, remote_path=authorized_keys)
    run('chmod 600 {}'.format(authorized_keys))
    # set local ssh config
    local_config = 'Host {}\nUser {}\nIdentityFile {}'.format(env.host_string, USERNAME, pem_key)
    config_filename = expanduser('~/.ssh/config')
    print "You'll want to put the following in your file '{}' and then add {} to your ssh-agent:\n\n{}\n\n".format(config_filename, pem_key, local_config)
    print ("And you'll want to add the following to your sudoers using visudo:\n\n" +
           "# Allow the ubuntu use to manage upstart applications" +
           "ubuntu ALL=(ALL:ALL) NOPASSWD: /usr/sbin/service, /sbin/start, /sbin/restart, /sbin/stop\n\n")
    prompt("> Yep, I've got it thanks! (press enter to continue)")
def install_pkg(pkgs):
    """Install each package description in ``pkgs`` on a zypper-based
    host: install missing rpm prerequisites, download each package's rpm
    (when declared) and install all downloaded rpms in one ``rpm -Uihv``
    run. Failures pause for operator confirmation instead of aborting."""
    pkgnames = []
    for pkg in pkgs:
        if pkg.requ:
            # Only invoke zypper when at least one prerequisite is missing.
            instll = False
            for req in pkg.requ:
                c = api.run("rpm -q %s" % req, warn_only=True)
                if c.failed:
                    instll = True
            if instll:
                c=api.sudo("zypper -n in %s" % " ".join(pkg.requ), warn_only=True)
                if c.failed:
                    warnn('There was an error. (You did press Ctrl+c..) Check the log!')
                    api.prompt('Any key to continue, ctrl-c again to abort')
            else:
                warnn('skipping package install. already present: ' + str(pkg.requ))
        if pkg.file:
            # Build the download URL from the configured prefix + path parts.
            lnk = '/'.join((_S.PKG.PREFIX,) + pkg.path + (pkg.file,)) + pkg.ext
            fname = pkg.file + pkg.ext
            pkgnames.append(fname)
            # Skip the download when the rpm is already on the host.
            if not files.exists(fname):
                api.run("wget %s" % lnk)
    if pkgnames:
        # Install every downloaded rpm in a single transaction.
        c = api.sudo("rpm -Uihv %s" % " ".join(pkgnames), warn_only=True)
        if c.failed:
            warnn('There was an error. Check the log!')
            api.prompt('Any key to continue, ctrl-c to abort')
def download_file(): print "Checking local disk space..." local("df -h") remote_path = prompt("Enter the remote file path:") local_path = prompt("Enter the local file path:") get(remote_path=remote_path, local_path=local_path) local("ls %s" %local_path)
def postgresql_setup():
    '''PostgreSQL 9.1 + PostGIS 1.5'''
    with settings(show('user'), hide('warnings', 'running', 'stdout', 'stderr')):
        set_project()
        # Ask for the postgres password once and cache it on env.
        if 'pgpass' not in env.keys():
            prompt('Passe PostgreSQL :', default=pgpass, key='pgpass')
        print(yellow('Configuration PostgreSQL+PostGIS...'))
        pretty_apt(['postgresql', 'binutils', 'gdal-bin', 'libproj-dev',
                    'postgresql-9.1-postgis', 'postgresql-server-dev-9.1',
                    'python-psycopg2', 'libgeoip1'])
        # print(yellow('Upgrading all packages...'))
        # fabtools.deb.upgrade()
        # Create a postgresql role matching the unix user name.
        # (original comment was French: "création d'un utilisateur
        # postgresql avec le meme nom d'utilisateur")
        if not fabtools.postgres.user_exists(env.user):
            fabtools.postgres.create_user(env.user, env.pgpass)
            # NOTE(review): the sudo user argument is censored ("******")
            # in this source; preserved verbatim.
            sudo('''psql -c "ALTER ROLE %(user)s CREATEDB;"''' % env, user='******')
            sudo('''psql -c "ALTER USER %(user)s with SUPERUSER;"''' % env, user='******')
            print(green('Création d’un superuser "%(user)s" PostgreSQL.' % env))
        # Write a ~/.pgpass so psql does not prompt for the password.
        if not exists('.pgpass'):
            run('echo "*:*:*:%(user)s:%(pgpass)s" >> .pgpass' % env)
            sudo('chmod 0600 .pgpass')
            print(green('Création du fichier .pgpass.'))
        # Install the Django-recommended PostGIS template database.
        run('curl https://docs.djangoproject.com/en/dev/_downloads/create_template_postgis-debian.sh -o postgis.sh')
        run('chmod +x postgis.sh')
        run('./postgis.sh')
        #postgresql_net_access()
        icanhaz.postgres.server()  # start server
def server_remove_partitions(): if not is_live_cd(): abort("This server not in LiveCD mode, can't remove partitions") if is_disks_has_no_partitions(): abort("Server disks already have NO partitions, nothing to do") print reply = prompt("\n\nDelete ALL partitions from ALL drives?\n\nEnter 'I AGREE' if you want this:") if reply != "I AGREE": print "Nothing to do" return for device in ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf']: for part_index in [4, 3, 2, 1]: with settings(warn_only=True): run("parted -s /dev/%s -- rm %d" % (device, part_index))
def prompt_for_tag(default_offset=1, stable_only = False): """ Prompt for the tag you want to use, offset for the default by input """ tags = tag_names(10, stable_only) print "Showing latest tags for reference" default = '0.0.1' if tags: default = tags[0] (major, minor, build) = check_tag_format(default) build = build+default_offset new_default = '%s.%s.%s' % (major, minor, build) tag = prompt('Tag name [in format x.xx] (default: %s) ? ' % new_default) tag = tag or new_default return tag
def terminate(self, server):
    """Terminate an EC2 instance after interactive confirmation.

    Prints the instance info, requires a literal 'T' (skipped when
    env.force is set), and renames the instance with a "-deleting"
    suffix before issuing the termination call so it stays identifiable
    while shutting down.
    """
    pprint.pprint(self.info(server))
    if env.force:
        sure = 'T'
    else:
        sure = prompt(red("Type 'T' to confirm termination"), default='N')
    if sure == 'T':
        old_name = server.tags.get("Name", "")
        new_name = "{}-deleting".format(old_name)
        server.add_tag("Name", new_name)
        print(green("Renamed to: {}".format(new_name)))
        self.connection.terminate_instances(instance_ids=[server.id])
        print("Terminated")
    else:
        print("Aborting termination")
def create_db():
    """Create a UTF-8 database named by env.db_name (or a prompt)."""
    if not env.db_name:
        db_name = prompt("Enter the DB name:")
    else:
        db_name = env.db_name
    # Fixed: "collateutf8_unicode_ci" was invalid SQL (missing space after
    # COLLATE), and "--host = % s" put spaces inside the mysql option so it
    # was passed as three broken arguments instead of --host=<host>.
    run('echo "CREATE DATABASE %s default character set utf8 collate utf8_unicode_ci;"|mysql --batch --user=%s --password=%s --host=%s' % (db_name, env.mysqluser, env.mysqlpassword, env.mysqlhost), pty=True)

def ls_db():
    """ List a dbs with size in MB """
    if not env.db_name:
        db_name = prompt("Which DB to ls?")
    else:
        db_name = env.db_name
    # Fixed the output alias typo ("DB Sizein MB" -> "DB Size in MB") and
    # separated the alias from the column name for readability.
    query = """SELECT table_schema "DB Name", Round(Sum(data_length + index_length) / 1024 / 1024, 1) "DB Size in MB" FROM information_schema.tables WHERE table_schema = \"%s\" GROUP BY table_schema """ % db_name
    run_sql(db_name, query)
def _restore_db(local_db_name, local_dump_path):
    """Recreate the local database from a dump file, delete the dump,
    then offer to create a fresh superuser (restoring the dump wipes any
    previously created ones)."""
    local('dropdb {0}'.format(local_db_name))
    local('createdb {0}'.format(local_db_name))
    local('psql {0} -f {1}'.format(local_db_name, local_dump_path))
    local('rm {0}'.format(local_dump_path))
    answer = prompt(
        'Any superuser accounts you previously created locally will'
        ' have been wiped. Do you wish to create a new superuser? (Y/n): ',
        default="Y")
    if answer.strip().lower() == 'y':
        local('django-admin createsuperuser')
def tag():
    '''Prompt for and confirm a new release tag number.'''
    print('Last release tag is:')
    with hide('running'):
        # Shows the most recent tag (by commit date) for reference.
        local('git describe --tags `git rev-list --tags --max-count=1`')
    print(
        'Insert release tag number to create.\n'
        'Format: X.Y.Z, (sometimes just X.Y).\n'
        'If deploying a new iteration, change the Y value.\n'
        'If re-deploying with minor changes on same iteration, increment the Z value.'
    )
    new_tag = prompt('Number: ', validate=_validate_tag)
    if confirm('Will create and push tag %s. Are you sure to continue?' % new_tag):
        # NOTE(review): the confirmation is a no-op -- no tag is actually
        # created or pushed here. This looks unfinished; confirm intent.
        pass
def setup_apache(template='apache.conf.tpl'):
    """ Setup apache with mod_wsgi to run django """
    # NOTE(review): no -y flag, so apt may stop and prompt interactively.
    sudo('apt-get install libapache2-mod-wsgi')
    server_name = prompt('Enter server name (e.g example.com):', 'server_name')
    # Render the vhost template and upload it to the home directory.
    upload_template('fabfiles/conf_templates/%s' % template, env.HOME_PATH,
                    context={
                        'server_name': server_name,
                        'project_path': env.PROJECT_PATH,
                        'project_name': env.PROJECT_NAME,
                    })
    # NOTE(review): the mv source is a path relative to the remote cwd --
    # assumes it resolves to where upload_template placed the file; verify.
    sudo('mv %s /etc/apache2/sites-available/%s.conf' % (template, env.PROJECT_NAME))
    sudo(
        'ln -s /etc/apache2/sites-available/%s.conf /etc/apache2/sites-enabled/%s.conf'
        % (env.PROJECT_NAME, env.PROJECT_NAME))
    sudo('a2ensite %s' % env.PROJECT_NAME)
def linux_setup():
    """
    setup is split into checkpoints so that we can resume, but also so that
    we can reboot the server and then resume.  Each checkpoint is
    responsible for initiating the next one.

    Fixes: file.readline() returns a string ('' at EOF), never None and
    never the int 1 — the original comparisons made both branches dead.
    The newline is now stripped (resolving the old TODO) and the file is
    closed via a context manager.
    """
    with open(checkpoint_file, 'r') as checkpoints:
        checkpoint = checkpoints.readline().strip()
    if not checkpoint:
        log("Starting at the beginning")
        _checkpoint_initial()
    if checkpoint == '1':
        log("Resuming at Checkpoint 1")
        _checkpoint_one()
    branch = prompt("staging or release?", default="release", validate=str)
def _assets_confirm(local_path): """ Check with user about whether to keep local or remote file. """ print '--> This file has been changed locally and on S3.' answer = prompt('Take remote [r] Take local [l] Take all remote [ra] Take all local [la] cancel', default='c') if answer == 'r': return ('remote', False) elif answer == 'l': return ('local', False) elif answer == 'ra': return ('remote', True) elif answer == 'la': return ('local', True) return (None, False)
def search(firstcond='', sure='no', foreman=None, user=None, passwd=None,
           *conds, **kwconds):
    """
    Use the given foreman search result as the hosts list.

    :param sure: If set to `yes`, it will not ask for confirmation before
        running.
    :param foreman: The foreman server url, like 'http://localhost:3000'
    :param user: Username to use when logging into foreman, default None
        (do not authenticate)
    :param passwd: Password to use when logging into foreman

    You can specify multiple condition like strings or parameters, that
    means that passing *fab on.foreman:'name=cinteg'* as a not named
    parameter or setting *fab on.foreman:name=cinteg* are the same. Any
    foreman searchstr string can be used. All the conditions will be
    agreggated with 'or'.
    """
    conds = list(conds)
    if sure not in ('yes', 'no'):
        conds.append(sure)
    if firstcond:
        conds.append(firstcond)
    # Fix: the original concatenated the joined keyword conditions directly
    # onto the already-joined positional string without a connector,
    # producing e.g. 'a or bc=d' when both kinds were given.  Collect every
    # fragment first and join once.
    conds.extend('%s=%s' % item for item in kwconds.iteritems())
    searchstr = ' or '.join(conds)
    if user:
        auth = (user, passwd)
    else:
        auth = None
    frm = frm_cli.Foreman(foreman, auth, api_version=2)
    for host in frm.index_hosts(search=searchstr, per_page=999).get('results'):
        env.hosts.append(host['name'])
    print(yellow("Query used: \n\t\"%s\"" % searchstr))
    print(
        yellow("Got %d hosts: \n\t" % len(env.hosts) +
               '\n\t'.join(env.hosts)))
    if sure != 'yes' and not env.parallel:
        if prompt('Is what you expected? y|n', default='y').lower() == 'n':
            abort('Ended by user request.')
def scm_get_ref(scm_type, use_default=False):
    """Return the SCM reference (branch/tag/path) to deploy.

    :param scm_type: 'svn' or 'git' (case-insensitive).
    :param use_default: when True, skip the interactive prompt and return
        the SCM's default reference ('trunk' for svn, 'master' for git).
    :raises ValueError: for an unsupported scm_type (the original fell
        through with 'default' unbound, raising UnboundLocalError).
    """
    scm = scm_type.lower()
    if scm == "svn":
        if not use_default:
            puts("SCM reference must be a path "
                 "relative to the project's root URL.")
        default = "trunk"
    elif scm == "git":
        if not use_default:
            puts("SCM reference must be a named "
                 "'branch', 'tag' or 'revision'.")
        default = "master"
    else:
        raise ValueError("Unsupported SCM type: %r" % scm_type)
    if use_default:
        ref = default
    else:
        ref = prompt("SCM ref", default=default)
    return ref
def release(live=False, tag='tmp'):
    """Tag the current commit, back up postgres, and tag the docker image.

    Asks the user to confirm that all changes are committed first; when the
    answer is not exactly 'yes', prints the git command to use and stops.
    """
    answer = prompt(
        'Did you remember to first commit all changes??',
        default='no',
    )
    if answer != 'yes':
        print("# Commit changes using:\n$ git commit -a -m 'message...'")
        return
    try:
        local('git tag %s' % tag)
    except:  # bare except is deliberate: also swallows fabric's SystemExit when the tag already exists
        pass
    postgres('backup', tag=tag)
    docker('tag {image_name}:latest {image_name}:{tag}'.format(
        image_name=env.image_name, tag=tag))
def _sanity_check(force=False):
    """Guard against accidental production deploys.

    Requires being on the 'master' branch (unless *force* is True) and
    answering a small arithmetic question correctly.

    :returns: True when all checks pass, False otherwise.

    Fix: a non-numeric answer used to crash with ValueError on int();
    it now simply fails the check.
    """
    branch = local("git rev-parse --abbrev-ref HEAD", capture=True)
    if branch != 'master' and not force:
        print("Sanity : You are on [%s], not [master]." % branch)
        return False
    a = random.randint(1, 9)
    b = random.randint(1, 9)
    result = prompt('You\'re deploying on production. \n %d + %d = ?' % (a, b))
    try:
        answer = int(result)
    except (TypeError, ValueError):
        answer = None
    if answer != a + b:
        print("Sanity : Calm down on deploying")
        return False
    return True
def _chose_session(sessions): for s, date in sessions.iteritems(): print " * {0} (started {1})".format(s, date) print def validate(x): if x == '': return False try: if int(x) in sessions: return int(x) else: raise ValueError() except: raise ValueError("Enter the number corresponding to the session") return prompt('Enter a session number:', validate=validate)
def backup_development_db():
    """For backing up development db to local.

    Dumps the remote postgres DB, copies it locally via scp, recreates a
    local <PROJECT_NAME>_dev database and restores the dump into it.
    """
    print(green('Creating db dump ...'))
    run('sudo -u postgres pg_dump -Fc %s > ~/db.dump' % DB_NAME)
    print(blue('Fetching dump from remote to local ..'))
    local('scp %s:~/db.dump .' % DEVELOPMENT_HOST)
    sudo('rm -rf ~/db.dump')
    # NOTE(review): this call was corrupted in the source
    # ("'Enter local posgresql superuser: '******'postgres'");
    # reconstructed as a prompt with a 'postgres' default — confirm
    # against repository history.
    db_user = prompt('Enter local posgresql superuser: ', default='postgres')
    db_name = '%s_dev' % PROJECT_NAME
    # dropdb may fail when the database does not exist yet — tolerated.
    # NOTE(review): collapsed source made the warn_only scope ambiguous;
    # only dropdb is treated as best-effort here.
    with warn_only():
        local('sudo -u %s dropdb %s' % (db_user, db_name))
    local('sudo -u %s createdb -O %s %s' % (db_user, DB_USERNAME, db_name))
    revoke_cmd = 'REVOKE CONNECT ON DATABASE %s FROM PUBLIC' % db_name
    local('sudo -u %s psql -c "%s"' % (db_user, revoke_cmd))
    with warn_only():
        local('sudo -u %s pg_restore -d %s db.dump' % (db_user, db_name))
    local('rm db.dump')
    print(blue('Successful!!'))
def install_mysql():
    """Non-interactively install MySQL server/client via apt.

    Pre-seeds the root password through debconf so the package install
    does not prompt, then runs mysql_secure_installation.
    """
    sudo('apt-get install -y debconf-utils')
    with settings(hide('warnings', 'stderr'), warn_only=True):
        result = sudo('dpkg-query --show mysql-server')
    if not result.failed:
        warn('MySQL is already installed')
        return
    mysql_password = prompt('Please enter MySQL root:')
    # NOTE(review): the password is interpolated into a shell command and
    # is visible in the remote process list / shell history — consider
    # feeding debconf-set-selections via stdin instead.
    for key in ('root_password', 'root_password_again'):
        sudo('echo "mysql-server-5.5 mysql-server/{} password '
             '{}" | debconf-set-selections'.format(key, mysql_password))
    with shell_env(DEBIAN_FRONTEND='noninteractive'):
        sudo('apt-get -y --no-upgrade install {}'.format(
            'mysql-server mysql-client'))
    sudo('mysql_secure_installation')
    sudo('apt-get install -y python-mysqldb')
def _load_database_dump(dump_filename):
    """Load a MySQL dump into the local 'toolkit' database.

    Prompts for a local DB username, asks for confirmation (the existing
    database contents are destroyed), loads the dump, then removes the
    dump file.

    Fix: the original ran `rm {dump_filename}` BEFORE piping the file into
    mysql, so the restore always read an already-deleted file; the cleanup
    now happens after a successful load.
    """
    if not os.path.isfile(dump_filename):
        utils.abort("Couldn't find {0}".format(dump_filename))
    db_username = prompt(
        "Please enter local database username "
        "(must have permission to drop and create database!)",
        default="root")
    if not console.confirm(
            "About to do something irreversible to the 'toolkit'"
            "database on your local system. Sure? ", default=False):
        utils.abort("User aborted")
    local("mysql -u{db_username} -p toolkit < {dump_filename}".format(
        db_username=db_username, dump_filename=dump_filename))
    local("rm {0}".format(dump_filename))
def observe():
    """
    Watch the outcome of the attack

    Run this to setup the stats collector on the target
    """
    fab.env.malaria_home = fab.run("cat /tmp/malaria-tmp-homedir")
    with fabt.python.virtualenv("%(malaria_home)s/venv" % fab.env):
        # Keep running user-supplied commands inside the malaria virtualenv
        # until an empty command is entered.  Default suits the two vagrant
        # machines / default attack command.
        while True:
            cmd = fab.prompt(
                "Enter command to run in malaria virtual env $",
                default="malaria subscribe -n 10 -N 20")
            if not cmd.strip():
                fab.puts("Ok, done done!")
                break
            fab.run(cmd % fab.env)
def set_remote_environment(self, available_environments):
    """Interactively select the remote environment to use.

    Deprecated: the environment is now set at command execution time.
    Loops until the answer matches a key of *available_environments*,
    then stores it on self.remote_environment.
    """
    warnings.warn(
        "Environment is set on command execution, deprecated feature",
        DeprecationWarning)
    environment_list = available_environments.keys()
    while self.remote_environment is False:
        choice = prompt("Which environment do you want to use: %s" %
                        environment_list)
        if choice not in environment_list:
            print("Wrong answer, please try again.")
        else:
            self.remote_environment = choice
def release():
    """
    Upload a new release to the PyPI.

    Strips the 'dev' suffix from the current version, updates the
    changelog/version, tags and uploads the sdist, then bumps to the next
    development version.
    """
    with build():
        version = _readversion()
        # Validate explicitly: 'assert' is stripped under `python -O`,
        # which would let a non-dev version slip through silently.
        if not version.endswith('dev'):
            raise ValueError(
                "Current version %r is not a dev version" % version)
        release_version = version.replace('dev', '')
        _check_changelog(release_version)
        _updateversion(release_version, 'release')
        scm_tag(release_version)
        local("cd %(build_path)s && ./bin/python setup.py sdist upload" % env)
        _updateversion(
            prompt("New development version number?",
                   default=_increment_version(release_version) + 'dev'),
            'dev')
        scm_pull(env.build_path, env.dev_path)
def template_context(app, workers=3, with_blog=False):
    """Assemble the template-rendering context for *app*.

    Blog credentials come from environment variables named
    <APPNAME>_<KEY>, falling back to an interactive prompt.
    """
    ctx = {
        'PRODUCTION_HOST': env.host in APPS.production_hosts,
        'app': app,
        'env': env,
        'workers': workers,
        'auth': '',
        'bloghost': '',
        'bloguser': '',
        'blogpassword': '',
    }
    if with_blog:  # pragma: no cover
        defaults = [
            ('bloghost', 'blog.%s' % app.domain),
            ('bloguser', app.name),
            ('blogpassword', ''),
        ]
        for key, default in defaults:
            envvar = ('%s_%s' % (app.name, key)).upper()
            ctx[key] = os.environ.get(envvar) or \
                prompt('Blog %s:' % key[4:], default=default)
        assert ctx['blogpassword']
    return ctx
def prompt_on_exception(msg):
    """Run the wrapped block; on any exception, ask the operator whether
    to continue the deploy or abort (exit code 1).

    NOTE(review): the generator body suggests an @contextmanager decorator
    outside this view — confirm.
    """
    try:
        yield
    except Exception as e:
        logger.warning(msg)
        logger.warning("Exception thrown: %s", e)
        prompt_str = "[C]ontinue the deploy or [A]bort?"
        valid_answers = ('C', 'A')
        user_opt = None
        # Re-prompt until one of the two valid answers is given.
        while user_opt not in valid_answers:
            user_opt = prompt(prompt_str)
        if user_opt == 'A':
            logger.critical("Aborting deploy")
            exit(1)
        logger.warning("Continuing, despite error")
def send_api_request(kwargs, retry=False):
    """
    Prompt user for confirmation, erase credentials and reauthenticate

    Returns the authomatic response
    """
    if retry:
        # Forbidden response: offer to wipe stored credentials and retry.
        reply = prompt(
            'Forbidden response. Want to update the credentials & retry?',
            default="No")
        if reply.lower() in ('y', 'yes', 'buzz off', 'screw you'):
            creds_path = os.path.expanduser(
                app_config.GOOGLE_OAUTH_CREDENTIALS_PATH)
            os.remove(creds_path)
            kwargs['credentials'] = check_credentials()
        else:
            logger.info('Ok so no retry...bye')
            exit()
    logger.debug('API Request: %s ' % kwargs)
    response = app_config.authomatic.access(**kwargs)
    logger.debug('API Response: %s ' % response.content)
    return response
def destroy_file(self, file_dict):
    """Ask for confirmation, then delete file_dict['path'] on the remote
    host.

    Keeps prompting until the user answers 'y' (delete) or 'n' (skip).

    Fix: the command was "sudo rm ..." executed through fabric's sudo(),
    which already runs the command with root privileges — the nested
    'sudo' was redundant (and can fail when remote sudo needs a tty).
    """
    file_path = file_dict['path']
    answered = False
    while not answered:
        answer = prompt('Are you sure you want to delete: %s (y/n)'
                        % file_path)
        if answer == 'y':
            answered = True
            sudo("rm %s" % file_path)
        elif answer == 'n':
            answered = True
            print("you answered no")
        else:
            print("Wrong answer, please try again.")
def build(version, config, env=defenv):
    'Builds this package into a directory tree'
    if not version:
        version = prompt("What version did you want packaged there, hotshot?")
    # Accept either a ready SafeConfigParser or a path/name to load.
    if not isinstance(config, ConfigParser.SafeConfigParser):
        config = _get_config(config) if config else _get_config()
    # Variables available for %-interpolation inside the ini file.
    config_vars = {'root': os.path.abspath(env.rootdir)}
    task = config.get(NAME, 'task', vars=config_vars)
    with cd(env.srcdir):
        local("printenv", capture=False)
        local("fab %s" % task, capture=False)
def deploy():
    """Package the local findRice tree, upload it, unpack a timestamped
    release on the server, optionally run migrations, repoint the
    ~/findRice symlink and restart the supervised services.
    """
    # Build a fresh zip of the project locally, excluding user media.
    temp_zip_path = os.path.join(BASE_DIR, "findRice-temp.zip")
    remote_temp_zip_path = "~/findRice-available/findRice-temp.zip"
    if os.path.isfile(temp_zip_path):
        os.remove(temp_zip_path)
    local(
        'cd {base_dir} && zip -r -q findRice-temp findRice -x "./findRice/media/*"'
        .format(base_dir=BASE_DIR))
    # Replace any stale zip on the server, then upload the new one.
    if exists(remote_temp_zip_path):
        run("rm {remote_temp_file}".format(
            remote_temp_file=remote_temp_zip_path))
    put(temp_zip_path, '~/findRice-available/')
    # os.remove(temp_zip_path)
    # Unpack into a timestamped release directory so old releases remain.
    proj_name = "findRice_{0}".format(
        datetime.datetime.now().strftime("%y%m%d_%H%M%S"))
    run("unzip -q {0} -d ~/findRice-available/{1}".format(
        remote_temp_zip_path, proj_name))
    remote_proj_path = os.path.join("~/findRice-available",
                                    os.path.join(proj_name, "findRice"))
    with cd(remote_proj_path):
        # Swap in the production settings module.
        run("rm findRice/settings.py")
        run("mv findRice/settings_remote.py findRice/settings.py")
        # NOTE(review): 'workon' assumes virtualenvwrapper is sourced in
        # the remote non-interactive shell — confirm.
        migrate_db = prompt("Did you modify the database? [Y/n]",
                            validate=r'[yYNn]')
        if migrate_db in ["Y", "y"]:
            run("workon find_rice_env && python manage.py makemigrations")
            run("workon find_rice_env && python manage.py migrate")
    # Point the live symlink at the new release and clean up the zip.
    enabled_proj_path = "~/findRice"
    if exists(enabled_proj_path):
        run("rm {proj}".format(proj=enabled_proj_path))
    run("ln -s {tar} {src}".format(src=enabled_proj_path,
                                   tar=remote_proj_path))
    run("rm {remote_temp_zip}".format(remote_temp_zip=remote_temp_zip_path))
    print "Restarting server"
    sudo("supervisorctl restart find_rice find_rice_celery")
def interactive_create_db(place, db_host, db_user, db_pass): """ Keep asking the user for a database name until the create function stops returning errors. """ error_code = 1 while error_code == 1: # Ask the user for a clean database txt = "What would you like to call you local database?" print db_name = prompt(txt) db_response = create(place, db_name, db_host, db_user, db_pass) if db_response.return_code == 1: print db_response.stderr print print "Try again...or CTRL-C to exit." else: error_code = db_response.return_code return db_name
def release_address():
    """Interactively pick one of the account's EC2 addresses and release it."""
    conn = get_ec2_connection()
    addresses = conn.get_all_addresses()
    # Build a numbered menu, 1-based, one line per address.
    menu = "Please select from the following addresses:\n"
    for idx, address in enumerate(addresses):
        menu += " %(ct)d: %(id)s\n" % {'ct': idx + 1, 'id': str(address)}
    menu += "Choose an address: "

    def validation(raw):
        selected = int(raw)
        if selected not in range(1, len(addresses) + 1):
            raise ValueError("%d is not a valid instance" % selected)
        return selected

    choice = prompt(menu, validate=validation)
    addresses[choice - 1].release()
def create():
    """Bootstrap a new WordPress project from this template directory.

    Prompts for project metadata, copies the template alongside this
    checkout, swaps in the theme template, rewrites placeholders, strips
    git metadata and re-initialises the project under mercurial.
    """
    prompt("Project Title:", "project_title", validate=nonempty)
    prompt("Project Name:", "project_name", clean(env.project_title), nonempty)
    prompt("Project URL:", "project_url", env.project_name + '.com', nonempty)
    # NOTE(review): this call was corrupted in the source
    # ('"Project User:"******"project_user", getuser(), nonempty');
    # reconstructed to match the neighbouring prompts — confirm against
    # repository history.
    prompt("Project User:", "project_user", getuser(), nonempty)
    path = join('../', env.project_name)
    if exists(path):
        abort("Project alread exists: " + path)
    local("cp -r %s %s" % (getcwd(), path))
    with lcd(path):
        theme_tpl = join('../', 'wordpress-theme-template')
        theme = join('wp-content/themes/', env.project_name)
        local("cp -r %s %s" % (theme_tpl, theme))
        local("mv project-fabfile.py fabfile.py")
        do_string_replacement([
            '.htaccess', 'hgrc', 'create.sh', 'local.wp-config.php',
            'production.wp-config.php', 'wp-cli.yml', 'fabfile.py',
            join(theme, 'style.css')
        ])
        local("rm -rf .git")
        local("rm -rf " + join(theme, '.git'))
        local("rm wp-config-sample.php")
        local("rm .gitignore")
        local("rm " + join(theme, '.gitignore'))
        local("rm " + join(theme, '.editorconfig'))
        local("hg init")
        local("mv hgrc .hg/")
        local("hg add .")
        local("hg commit -m'initial import'")
        local("cp local.wp-config.php wp-config.php")
        local("cp -r ../wordpress wordpress")
def tearDownDatabase():
    ''' undeploy database on target $MSOD_DB_HOST '''
    sbHome = getSbHome()
    # Require explicit confirmation before the destructive teardown below.
    answer = prompt('Undeploy database on ' + os.environ.get('MSOD_DB_HOST')
                    + ' (y/n)?')
    if answer != 'y' and answer != 'Y':
        print "aborting"
        return
    # Stop All Running Services
    sudo('service dbmonitor stop')
    sudo('service mongod stop')
    # Remove All Services
    sudo('chkconfig --del dbmonitor')
    sudo('chkconfig --del mongod')
    # Uninstall All Installed Utilities
    # warn_only: individual removals may fail (already absent) without
    # aborting the whole teardown.
    with settings(warn_only=True):
        sudo('rm /usr/bin/dbmonitor')
        sudo('rm /etc/init.d/dbmonitor')
        sudo('rm /etc/mongod.conf')
        sudo(
            'yum remove -y python-setuptools readline-devel tk-devel gdbm-devel db4-devel libpcap-devel'
        )
        sudo(
            'yum remove -y zlib-devel bzip2-devel openssl-devel ncurses-devel sqlite-devel xz-devel policycoreutils-python'
        )
        sudo('yum erase -y $(rpm -qa | grep mongodb-enterprise)')
        sudo('yum erase -y $(rpm -qa | grep mongodb-org)')
        sudo('/usr/local/bin/pip uninstall -y pymongo')
        sudo('/usr/local/bin/pip uninstall -y python-daemon')
    # Remove SPECTRUM_BROWSER_HOME Directory
    with settings(warn_only=True):
        sudo('rm -r ' + sbHome + ' /spectrumdb /etc/msod')
        sudo('userdel -r spectrumbrowser')
        sudo('userdel -r mongod')
    # Clean Remaining Files
    sudo('rm -rf /var/log/mongodb')
    sudo('rm -f /var/log/dbmonitoring.log')