def create_database(db, user, password):
    """Create a superuser role and a PostGIS-templated database it owns.

    Runs under ``warn_only`` so re-running against an existing role or
    database does not abort the task.
    """
    with fab_settings(warn_only=True):
        # Create the role first so it can own the new database.
        sudo("createuser -s %s" % user, user="******")
        sudo("createdb -O %s %s -T template_postgis" % (user, db),
             user="******")
        # Set the role's password via SQL.
        sudo("psql -c \"alter user %s with encrypted password '%s'\" "
             % (user, password), user="******")
def homedir_exists(username):
    """Check whether *username* has a home directory on the file server.

    Returns the fabric result of ``ls`` on the expected home path; use its
    ``.succeeded`` / ``.failed`` attributes (the captured output itself is
    empty because the command redirects to ``/dev/null``).

    BUG FIX: the original ran without ``warn_only``, so a missing home
    directory aborted the entire fab run instead of letting the caller
    inspect the result. We now default ``warn_only`` on (without overriding
    anything ``_get_fabric_settings`` already decided).
    """
    host_string = '{}@{}'.format(settings.FILESERVER_SSH_USER,
                                 settings.FILESERVER_HOST)
    fabric_settings = _get_fabric_settings(host_string)
    # Only set warn_only if the project helper did not choose a value.
    fabric_settings.setdefault('warn_only', True)
    home_path = os.path.join(settings.FILESERVER_HOME_PATH, username)
    with fab_settings(**fabric_settings):
        with hide('running', 'stdout', 'stderr'):
            return run('ls {} &>/dev/null'.format(home_path))
def test(options=None, integration=1, selenium=1, test_settings=None):
    """
    Runs manage.py tests under coverage and renders an HTML report.

    Usage::

        fab test
        fab test:app
        fab test:app.tests.forms_tests:TestCaseName
        fab test:integration=0
        fab test:selenium=0

    """
    if test_settings is None:
        test_settings = settings.TEST_SETTINGS_PATH
    # Assemble the command from parts; joining with spaces reproduces the
    # exact command line of the original implementation.
    parts = [
        "coverage run --source='.' manage.py test -v 2 --traceback",
        "--failfast --settings={0} --pattern='*_tests.py'".format(
            test_settings),
    ]
    if not int(integration):
        parts.append("--exclude='integration_tests'")
    if not int(selenium):
        parts.append("--exclude='selenium_tests'")
    if options:
        parts.append('{0}'.format(options))
    with fab_settings(warn_only=True):
        local(' '.join(parts), capture=False)
        local('coverage html -d coverage --omit="{}"'.format(
            settings.COVERAGE_EXCLUDES))
def test(options=None, integration=1, selenium=1, test_settings=None): """ Runs manage.py tests. Usage:: fab test fab test:app fab test:app.tests.forms_tests:TestCaseName fab test:integration=0 fab test:selenium=0 """ if test_settings is None: test_settings = settings.TEST_SETTINGS_PATH command = ("./manage.py test -v 2 --traceback --failfast" + " --settings={0}".format(test_settings)) if int(integration) == 0: command += " --exclude='integration_tests'" if int(selenium) == 0: command += " --exclude='selenium_tests'" if options: command += ' {0}'.format(options) with fab_settings(warn_only=True): result = local(command, capture=False) if result.failed: abort(red('Some tests failed')) else: print green('All tests passed')
def import_media(filename=None):
    """
    Extracts media dump into your local media root.

    Please note that this might overwrite existing local files.

    Usage::

        fab import_media
        fab import_media:filename=foobar.tar.gz

    """
    filename = filename or settings.MEDIA_DUMP_FILENAME
    project_root = os.getcwd()
    # Probe for the dump quietly; warn_only lets us inspect .failed.
    with fab_settings(hide('everything'), warn_only=True):
        is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
            project_root, filename)).failed
    if is_backup_missing:
        abort(red('ERROR: There is no media backup that could be imported in'
                  ' {0}. We need a file called {1} in that folder.'.format(
                      project_root, filename)))
    # copy the dump into the media root folder
    with lcd(project_root):
        local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
    # extract and remove media dump
    with lcd(settings.MEDIA_ROOT):
        local('tar -xvf {0}'.format(filename))
        local('rm -rf {0}'.format(filename))
def drop_db():
    """Drops the local database and its owning role (best effort)."""
    statements = (
        'DROP DATABASE {0}'.format(settings.DB_NAME),
        'DROP USER {0}'.format(settings.DB_ROLE),
    )
    with fab_settings(warn_only=True):
        for statement in statements:
            local('psql {0} -c "{1}"'.format(USER_AND_HOST, statement))
def create_postgis_template():
    """Create the ``template_postgis`` database template.

    Loads the PostGIS SQL scripts for the locally installed POSTGIS_VER into
    a fresh template database and grants public access to the PostGIS
    metadata tables. Runs under ``warn_only`` so it is safe to re-run.

    BUG FIX: the datistemplate UPDATE was interpolated into ``psql -c %s``
    without quoting, so the multi-word SQL statement was split by the shell
    and the command failed. The SQL is now wrapped in double quotes (its own
    quoting uses single quotes, so this is safe).
    """
    with fab_settings(warn_only=True):
        sudo("createdb -E UTF8 template_postgis", user="******")
        sudo("createlang -d template_postgis plpgsql", user="******")
        cstring = ("UPDATE pg_database SET datistemplate='true'"
                   " WHERE datname='template_postgis'")
        sudo('psql -d postgres -c "%s"' % cstring, user="******")
        sudo(
            "psql -d template_postgis -f /usr/share/postgresql/9.1/contrib/postgis-%s/postgis.sql"
            % POSTGIS_VER[:3],
            user="******",
        )
        sudo(
            "psql -d template_postgis -f /usr/share/postgresql/9.1/contrib/postgis-%s/spatial_ref_sys.sql"
            % POSTGIS_VER[:3],
            user="******",
        )
        sudo('psql -d template_postgis -c "GRANT ALL ON geometry_columns TO PUBLIC;"', user="******")
        sudo('psql -d template_postgis -c "GRANT ALL ON geography_columns TO PUBLIC;"', user="******")
        sudo('psql -d template_postgis -c "GRANT ALL ON spatial_ref_sys TO PUBLIC;"', user="******")
        # PostGIS 2.x ships raster support in a separate script.
        if POSTGIS_VER[:1] == "2":
            sudo(
                "psql -d template_postgis -f /usr/share/postgresql/9.1/contrib/postgis-%s/rtpostgis.sql"
                % POSTGIS_VER[:3],
                user="******",
            )
def syntax_check():
    """Runs pattern-based syntax checks (egrep) against the codebase."""
    with fab_settings(warn_only=True):
        for file_type in settings.SYNTAX_CHECK:
            needs_to_abort = False
            # egrep exits 1 when nothing matches; treat that as success so
            # a clean file does not trip warn_only handling.
            if 1 not in env.ok_ret_codes:
                env.ok_ret_codes.append(1)
            found = local(
                'find -name "{}" -print'.format(file_type),
                capture=True,
            )
            for file in found.split():
                if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
                    continue
                result = local('egrep -i -n "{0}" {1}'.format(
                    settings.SYNTAX_CHECK[file_type], file), capture=True)
                if result:
                    warn(red("Syntax check found in '{0}': {1}".format(
                        file, result)))
                    needs_to_abort = True
            if needs_to_abort:
                abort(red('There have been errors. Please fix them and run'
                          ' the check again.'))
            else:
                puts(green('Syntax check found no errors. Very good!'))
# NOTE(review): this task is NOT valid Python as it stands -- the
# fab_settings(...) call below contains
#     sudo_prompt='ARemind sudo password: '******'mkdir -p %(www_root)s' % env)
# which appears to have been mangled by automated credential scrubbing
# ("******" has replaced part of the source, fusing the prompt string with a
# scrubbed sudo('mkdir -p ...') call). Restore the original prompt string and
# the missing sudo() invocation before using this task. Left byte-identical
# here because the true original cannot be reconstructed from this file.
def setup_virtualenv(): """ Initially creates the virtualenv in the correct places (creating directory structures as necessary) on the remote host. If necessary, installs setup_tools, then pip, then virtualenv (packages) """ print green('In packages module. Installing VirtualEnv on host machine...') require('virtualenv_root', provided_by=('setup_env')) with cd('/tmp'): if env.os == 'ubuntu': sudo('apt-get install -y python-setuptools python-setuptools-devel') elif env.os == 'redhat': sudo('yum install -y python-setuptools python-setuptools-devel') else: utils.abort('Unrecognized OS %s!' % env.os) sudo('easy_install pip') sudo('pip install virtualenv', pty=True, shell=True) print yellow('Require user:%(sudo_user)s password!' % env) with fab_settings(user=env.sudo_user, sudo_prompt='ARemind sudo password: '******'mkdir -p %(www_root)s' % env) sudo('chown -R %(www_root)s %(virtualenv_root)s' % env) sudo('chgrp -R %(www_root)s %(virtualenv_root)s' % env) args = '--clear --distribute' sudo('virtualenv %s %s' % (args, env.virtualenv_root), user=env.sudo_user) print green('In packages module. Done installing VirtualEnv...')
def drop_db():
    """Drops the local database (env-configured variant)."""
    local_machine()
    with fab_settings(warn_only=True):
        # Drop the database before the role that owns it.
        local('psql {0} -c "DROP DATABASE {1}"'.format(
            USER_AND_HOST, env.db_name))
        local('psql {0} -c "DROP USER {1}"'.format(
            USER_AND_HOST, env.db_role))
def pg_dbrestore_prod(dbname, dump_filename, dbtemplate="template_postgis"):
    """Drop and recreate *dbname* on the server, then restore a dump into it.

    Only the ``template_postgis`` template is supported. Note the created
    database is owned by a role of the same name as the database.
    """
    if dbtemplate != "template_postgis":
        raise NotImplementedError()
    with fab_settings(warn_only=True):
        with cd(settings.DBDUMPS_ROOT):
            sudo("dropdb {0}".format(dbname), user="******")
            sudo("createdb -E UTF8 --template={0} --owner={1} {1}".format(
                dbtemplate, dbname), user="******")
            sudo("pg_restore --dbname={0} {1}{2}".format(
                dbname, settings.DBDUMPS_ROOT, dump_filename), user="******")
def get_remote_dump(self):
    """Dump the remote Postgres database to tempdump.sql and scp it here."""
    with fab_settings(host_string=self.host):
        with cd(self.working_dir):
            print('- Engine Postgres')
            # Plain-SQL dump without ownership statements (-O).
            run("pg_dump -h {} --port={} --username={} -O {}"
                " -f tempdump.sql".format(
                    self.db['HOST'], self.db['PORT'],
                    self.db['USER'], self.db['NAME']))
            # Pull the dump down to the local working copy.
            local("scp -P {} {}@{}:{}/tempdump.sql tempdump.sql".format(
                self.port, self.user, self.ip, self.working_dir))
def _remove_user_and_group(username=False, project_name=False,
                           project_type='git'):
    """Best-effort removal of a project's unix user and group.

    ``project_type`` is accepted for interface compatibility but unused.
    """
    with fab_settings(warn_only=True):
        run('deluser %s' % username)
        run('groupdel project-%s' % project_name)
def _configure_service(service_name, backup_postfix, single_value_edits,
                       multi_value_edits, template_dir, template_files):
    """
    Configure a service as defined in the supplied params.

    :param service_name: eg: Nova
    :param single_value_edits: dict of configuration instructions
    :param multi_value_edits: dict of configuration instructions
    :param template_dir: directory on calling host where templates are found
    :param template_files: dict of configuration instructions
    :return:
    """
    backed_up_files = []
    with fab_settings(warn_only=True, user="******"):
        print(green("\nConfiguring %s" % service_name))
        # StrOpt (single-value) edits.
        for file_name, configs in single_value_edits.items():
            backed_up_files = _backup_config_file(
                file_name, backup_postfix, backed_up_files)
            try:
                _set_single_value_configs(file_name, configs)
            except IOError:
                # Missing config file: skip, matching best-effort semantics.
                pass
        # MultiStrOpt (multi-value) edits.
        for file_name, configs in multi_value_edits.items():
            backed_up_files = _backup_config_file(
                file_name, backup_postfix, backed_up_files)
            try:
                _set_multi_value_configs(file_name, configs)
            except IOError:
                pass
        # Whole-file template uploads.
        for entry in template_files:
            file_name = entry['file']
            template_name = entry['template']
            template_context = entry.get('context', {})
            backed_up_files = _backup_config_file(
                file_name, backup_postfix, backed_up_files)
            upload_template(
                template_name, file_name, context=template_context,
                template_dir=template_dir, backup=False)
def shell(cmd, capture=not settings.FABRIC_OUTPUT, ignore_return_code=False):
    """Run *cmd* locally, raising DVCSException on failure.

    Returns the command output decoded permissively (Python 2 semantics).
    """
    logging.debug('Executing shell %s' % cmd)
    with fab_settings(warn_only=True):
        out = local(cmd, capture)
        if out.failed and not ignore_return_code:
            info = {
                'cmd': cmd,
                'code': out.return_code,
                'stderr': out.stderr.decode('utf8'),
                'stdout': getattr(out, 'stdout', '').decode('utf8'),
            }
            raise DVCSException(
                'Executing %(cmd)s failed %(code)d stderr: %(stderr)s'
                ' stdout:%(stdout)s' % info, **info)
        return unicode(out, errors='ignore').decode('utf8', 'ignore')
def _run_task(task, args=None, kwargs=None):
    """Run *task* once per configured deploy target, or once locally.

    NOTE: the local name ``env`` deliberately mirrors the original and
    shadows fabric's global ``env`` inside this function.
    """
    from fabfile import targets
    if not targets:
        _run_task_core(task, args, kwargs)
        return
    for target in targets:
        env = _build_env(target)
        # Targets that produce no environment are skipped silently.
        if env:
            with fab_settings(**env):
                _run_task_core(task, args, kwargs)
def puname(filter=None):
    """Print ``host -> UNAME`` for every host, optionally substring-filtered."""
    with fab_settings(hide('running', 'stderr', 'stdout', 'warnings'),
                      warn_only=True):
        results = execute(_get_uname)
        for host, uname in results.iteritems():
            uname_upper = uname.upper()
            # No filter means print everything; otherwise match
            # case-insensitively against the uname output.
            if filter is None or filter.upper() in uname_upper:
                print("%s -> %s" % (host, uname_upper))
def install_packages():
    """Install packages, given a list of package names"""
    upload_pip_requires()
    pip_cmd = ('pip install -E %(virtualenv_root)s'
               ' --requirement %(pip_requirements_remote_path)s' % env)
    with cd(env.project_root):
        with fab_settings(user=env.sudo_user):
            sudo(pip_cmd, user=env.sudo_user, pty=True, shell=True)
def restart_apache(*args): """ Restart Apache2 Server """ global target if target: with fab_settings(**_build_env(target)): _restart_apache() else: print "Need to set target first."
def restart_geoserver(*args): """ Restart Tomcat 7 server, which contains GeoServer. """ global target if target: with fab_settings(**_build_env(target)): _restart_geoserver() else: print "Need to set target first."
def restart_nginx(*args): """ Restart NGINX server """ global target if target: with fab_settings(**_build_env(target)): _restart_nginx() else: print "Need to set target first."
def deploy_project(project):
    """Upload a project's code and apache config to the server.

    # Put apps....change settings to get project apps automagically
    """
    put(project, PYLIBS, use_sudo=True)
    # Upload sibling app directories too (best effort).
    with fab_settings(warn_only=True):
        for projdir in glob.glob('*'):
            if not os.path.isdir(projdir):
                continue
            if projdir not in ('shared', 'data'):
                put(projdir, PYLIBS, use_sudo=True)
    put('requirements.txt', GEONODEDIR, use_sudo=True)
    with cd(GEONODEDIR), prefix(ACT):
        sudo('pip install -r requirements.txt')
        sudo('rm requirements.txt')
    apache_conf = '/etc/apache2/sites-available/%s' % project
    put('%s/%s.apache' % (project, project), apache_conf, use_sudo=True)
    sed(apache_conf, 'REPLACE_WITH_SITEDIR', PYLIBS, use_sudo=True)
def inspect(*args): """ Inspects server. Prints operating system major release and disk information. """ global target if target: with fab_settings(**_build_env(target)): _inspect() else: print "Need to set target first."
def get_home_dirs():
    """Return the list of home directories found on the file server."""
    host_string = '{}@{}'.format(settings.FILESERVER_SSH_USER,
                                 settings.FILESERVER_HOST)
    fabric_settings = _get_fabric_settings(host_string)
    # `find` output lines all start with the home path; build the separator
    # used to split them apart, ensuring a trailing slash.
    separator = '\r\n{}'.format(settings.FILESERVER_HOME_PATH)
    if settings.FILESERVER_HOME_PATH[-1] != '/':
        separator += '/'
    with fab_settings(**fabric_settings):
        with hide('running', 'stdout', 'stderr'):
            homedirs_str = run('find {} -maxdepth 1 -type d'.format(
                settings.FILESERVER_HOME_PATH))
            # The first element is the home path itself; drop it.
            return homedirs_str.split(separator)[1:]
def create_postgis_template():
    """Create the ``template_postgis`` database template.

    Loads the PostGIS SQL scripts matching POSTGIS_VER into a fresh template
    database and grants public access to the PostGIS metadata tables. Safe to
    re-run thanks to ``warn_only``.

    BUG FIX: the datistemplate UPDATE was passed to ``psql -c %s`` without
    quoting, so the shell split the multi-word SQL statement and the command
    failed. It is now wrapped in double quotes (the SQL itself only uses
    single quotes, so this is safe).
    """
    with fab_settings(warn_only=True):
        sudo('createdb -E UTF8 template_postgis', user='******')
        sudo('createlang -d template_postgis plpgsql', user='******')
        cstring = ("UPDATE pg_database SET datistemplate='true'"
                   " WHERE datname='template_postgis'")
        sudo('psql -d postgres -c "%s"' % cstring, user='******')
        sudo('psql -d template_postgis -f /usr/share/postgresql/9.1/contrib/postgis-%s/postgis.sql' % POSTGIS_VER[:3], user='******')
        sudo('psql -d template_postgis -f /usr/share/postgresql/9.1/contrib/postgis-%s/spatial_ref_sys.sql' % POSTGIS_VER[:3], user='******')
        sudo('psql -d template_postgis -c "GRANT ALL ON geometry_columns TO PUBLIC;"', user='******')
        sudo('psql -d template_postgis -c "GRANT ALL ON geography_columns TO PUBLIC;"', user='******')
        sudo('psql -d template_postgis -c "GRANT ALL ON spatial_ref_sys TO PUBLIC;"', user='******')
        # PostGIS 2.x ships raster support in a separate script.
        if POSTGIS_VER[:1] == '2':
            sudo('psql -d template_postgis -f /usr/share/postgresql/9.1/contrib/postgis-%s/rtpostgis.sql' % POSTGIS_VER[:3], user='******')
def pg_dbrestore_local(dbname, path_to_dump_file, dbtemplate="template_postgis"):
    """
    Usage: fab pg_dbrestore_local:scenemachine,/home/deploy/dbdumps/scenemachine.dump
    tip: make sure your local user is also a postgres superuser
    tip: setup a local cronjob to sync backups
        0 0 0 0 * postgres duplicity cf+http://dbdumps /home/rizumu/dbdumps
    """
    if dbtemplate != "template_postgis":
        raise NotImplementedError()
    # Note: the recreated database is owned by a role named after the db.
    commands = (
        "dropdb {0}".format(dbname),
        "createdb --template={0} --owner={1} {1}".format(dbtemplate, dbname),
        "pg_restore --dbname={0} {1}".format(dbname, path_to_dump_file),
    )
    with fab_settings(warn_only=True):
        for command in commands:
            local(command)
def deploy_project(project):
    """Upload project code and apache config, then link production settings.

    # Put apps....change settings to get project apps automagically
    """
    put(project, PYLIBS, use_sudo=True)
    # Upload sibling app directories too (best effort).
    with fab_settings(warn_only=True):
        for projdir in glob.glob("*"):
            if not os.path.isdir(projdir):
                continue
            if projdir not in ("shared", "data"):
                put(projdir, PYLIBS, use_sudo=True)
    put("requirements.txt", GEONODEDIR, use_sudo=True)
    with cd(GEONODEDIR), prefix(ACT):
        sudo("pip install -r requirements.txt")
        sudo("rm requirements.txt")
    apache_conf = "/etc/apache2/sites-available/%s" % project
    put("%s/%s.apache" % (project, project), apache_conf, use_sudo=True)
    sed(apache_conf, "REPLACE_WITH_SITEDIR", PYLIBS, use_sudo=True)
    # Activate production settings for the deployed project.
    with cd(os.path.join(PYLIBS, project)):
        sudo("ln -s settings_production.py local_settings.py")
def syntax_check():
    """Runs pattern-based syntax checks (egrep) against the codebase."""
    with fab_settings(warn_only=True):
        for file_type in settings.SYNTAX_CHECK:
            pattern = settings.SYNTAX_CHECK[file_type]
            found = local(
                'find -name "{}" -print'.format(file_type),
                capture=True,
            )
            for file in found.split():
                # Skip excluded paths.
                if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
                    continue
                result = local('egrep -i -n "{}" {}'.format(pattern, file),
                               capture=True)
                # Any egrep hit is a violation; abort on the first one.
                if result:
                    abort(red("Syntax check found in '{}': {}".format(
                        file, result)))
def upload_files( local=None, manifest=None, drop=None, tries=None, user=None, group=None, notify_level=None, iam=None, topic=None, use_sudo=None, ): """ Uploads files to drop folder on remote server. local: path to local files. Supports wildcards. manifest: path to local manifest file that includes a local file path for each line. Supports wildcards. drop: path on remote server to upload files to. tries: # of tries to attempt for each file. user: user owner of new remote file group: group owner of new remote file notify_level: notification_level. 0 = No notification. 1 = one aggregate report at end of job. 2 = one aggregate report plus reports for each file. iam: AWS IAM user to use for notifications. Matches values in aws.py topic: AWS SNS topic to notify when complete. Matches values in aws.py use_sudo: Use sudo """ global target if target: with fab_settings(**_build_env(target)): _upload_files( target, local=local, manifest=manifest, drop=drop, tries=tries, user=user, group=group, iam=iam, notify_level=notify_level, topic=topic, use_sudo=use_sudo, ) else: print "Need to set target first."
def set_remote_dump(self):
    """Overwrite the remote database with the local tempdump.sql dump.

    Asks for confirmation first (user-facing messages are in Italian),
    uploads the dump, kills open connections, then drops, recreates and
    reloads the database.
    """
    prompt_msg = ("Attenzione, in questo modo tutti i dati presenti sul"
                  " database {} verranno sovrascritti. Sei sicuro di voler"
                  " procedere?").format(self.settings.upper())
    if not confirm(prompt_msg):
        return
    db_host = self.db['HOST']
    db_port = self.db['PORT']
    db_name = self.db['NAME']
    with fab_settings(host_string=self.host):
        with cd(self.working_dir):
            # Push the dump to the remote working dir.
            local("scp -P {} tempdump.sql {}@{}:{}/tempdump.sql".format(
                self.port, self.user, self.ip, self.working_dir))
            # Kill any open connections so the database can be dropped.
            run('psql -h {} -p {} -U postgres -c "select pg_terminate_backend(pid) from pg_stat_activity where datname = \'{}\';"'.format(
                db_host, db_port, db_name))
            run('dropdb --if-exists -h {} -p {} -U postgres {}'.format(
                db_host, db_port, db_name))
            print('- Database {} eliminato'.format(db_name))
            run('createdb -h {} -p {} -U postgres {}'.format(
                db_host, db_port, db_name))
            print('- Database {} creato. carico il dump'.format(db_name))
            run('psql -h {} -p {} -U postgres -x -e -E -w -d {} -f tempdump.sql -L /dev/null'.format(
                db_host, db_port, db_name))
def jshint():
    """Runs jshint checks."""
    with fab_settings(warn_only=True):
        needs_to_abort = False
        # because jshint fails with exit code 2, we need to allow this as
        # a successful exit code in our env
        if 2 not in env.ok_ret_codes:
            env.ok_ret_codes.append(2)
        files = local(
            'find -name "{}" -print'.format('*.js'),
            capture=True,
        ).split()
        jshint_installed = local('command -v jshint', capture=True)
        if not jshint_installed.succeeded:
            warn(red(
                "To enable an extended check of your js files, please"
                " install jshint by entering:\n\n npm install -g jshint"
            ))
        else:
            # Allow a jshint-specific exclude list, falling back to the
            # general syntax-check excludes.
            if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
                excludes = settings.JSHINT_CHECK_EXCLUDES
            else:
                excludes = settings.SYNTAX_CHECK_EXCLUDES
            for file in files:
                if any(s in file for s in excludes):
                    continue
                jshint_result = local('jshint {0}'.format(file), capture=True)
                if jshint_result:
                    warn(red('JS errors detected in file {0}'.format(file)))
                    puts(jshint_result)
                    needs_to_abort = True
        if needs_to_abort:
            abort(red('There have been errors. Please fix them and run'
                      ' the check again.'))
        else:
            puts(green('jshint found no errors. Very good!'))
def deploy_project(project):
    """Upload project code and apache config, then link production settings.

    # Put apps....change settings to get project apps automagically
    """
    put(project, PYLIBS, use_sudo=True)
    # Upload sibling app directories too (best effort).
    with fab_settings(warn_only=True):
        for projdir in glob.glob('*'):
            if not os.path.isdir(projdir):
                continue
            if projdir not in ('shared', 'data'):
                put(projdir, PYLIBS, use_sudo=True)
    put('requirements.txt', GEONODEDIR, use_sudo=True)
    with cd(GEONODEDIR), prefix(ACT):
        sudo('pip install -r requirements.txt --upgrade')
        sudo('rm requirements.txt')
    apache_conf = f'/etc/apache2/sites-available/{project}'
    put(f'{project}/{project}.apache', apache_conf, use_sudo=True)
    sed(apache_conf, 'REPLACE_WITH_SITEDIR', PYLIBS, use_sudo=True)
    # Activate production settings for the deployed project.
    with cd(os.path.join(PYLIBS, project)):
        sudo('ln -s settings_production.py local_settings.py')
def create_instance(externalapp_id):
    """Provision an askbot instance for the given ExternalApp.

    Returns True only if the instance tool, supervisor reload and nginx
    restart all succeed; returns False if the instance already exists or
    any step fails.
    """
    with fab_settings(
        hide('stdout', 'warnings'),
        show('running', 'stderr'),
    ):
        externalapp = ExternalApp.objects.get(pk=externalapp_id)
        instance_name = externalapp.slug
        if exists_instance(instance_name):
            return False
        instance_type = externalapp.instance_type
        result = run("openmooc-askbot-instancetool -c %s %s_%s" % (
            instance_name, instance_type, instance_name))
        if result.failed:
            return False
        if run("supervisorctl reload").failed:
            return False
        return run("service nginx restart").succeeded
def import_db(filename=None):
    """
    Imports the database.

    Make sure that you have this in your ``~/.pgpass`` file:

    localhost:5433:*:publishizer_publishizer:publishizer

    Also make sure that the file has ``chmod 0600 .pgpass``.

    Usage::

        fab import_db
        fab import_db:filename=foobar.dump

    """
    filename = filename or settings.DB_DUMP_FILENAME
    restore_cmd = 'pg_restore -O -c -U {0} -d {1} {2}'.format(
        settings.DB_ROLE, settings.DB_NAME, filename)
    with fab_settings(warn_only=True):
        local(restore_cmd)
def build_image(image_yaml_path):
    """
    Given the path to an image.yaml file with everything you need for
    'vimage' to make a build, copy it to the remote host and run the vimage
    tool on it.  Then copy the resulting image and compile log up here.
    """
    with open(image_yaml_path) as fh:
        image_data = yaml.safe_load(fh)
    with temp_dir():
        try:
            put(image_yaml_path, 'image_job.yaml', use_sudo=True)
            sudo('vimage build image_job.yaml', capture_buffer_size=1024)
            fname = '%s.tar.gz' % image_data['new_image_name']
            get(fname, fname)
        finally:
            # try to get .log even if build fails.
            logfile = '%(new_image_name)s.log' % image_data
            try:
                with fab_settings(warn_only=True):
                    get(logfile, logfile)
            except Exception:
                print_host("Could not retrieve", logfile)
def import_db(filename=None):
    """
    Imports the database.

    Make sure that you have this in your ``~/.pgpass`` file:

    localhost:5433:*:publishizer_publishizer:publishizer

    Also make sure that the file has ``chmod 0600 .pgpass``.

    Usage::

        fab import_db
        fab import_db:filename=foobar.dump

    """
    local_machine()
    filename = filename or settings.DB_DUMP_FILENAME
    restore_cmd = 'pg_restore -O -c -U {0}{1} -d {2} {3}'.format(
        env.db_role, HOST, env.db_name, filename)
    with fab_settings(warn_only=True):
        local(restore_cmd)
def jshint():
    """Runs jshint checks."""
    with fab_settings(warn_only=True):
        needs_to_abort = False
        # because jshint fails with exit code 2, we need to allow this as
        # a successful exit code in our env
        if 2 not in env.ok_ret_codes:
            env.ok_ret_codes.append(2)
        output = local(
            'find -name "{}" -print'.format('*.js'),
            capture=True,
        )
        jshint_installed = local('command -v jshint', capture=True)
        if not jshint_installed.succeeded:
            warn(
                red("To enable an extended check of your js files, please"
                    " install jshint by entering:\n\n npm install -g jshint"
                    ))
        else:
            # Prefer a jshint-specific exclude list when configured.
            if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
                excludes = settings.JSHINT_CHECK_EXCLUDES
            else:
                excludes = settings.SYNTAX_CHECK_EXCLUDES
            for file in output.split():
                if any(s in file for s in excludes):
                    continue
                jshint_result = local('jshint {0}'.format(file), capture=True)
                if jshint_result:
                    warn(red('JS errors detected in file {0}'.format(file)))
                    puts(jshint_result)
                    needs_to_abort = True
        if needs_to_abort:
            abort(
                red('There have been errors. Please fix them and run'
                    ' the check again.'))
        else:
            puts(green('jshint found no errors. Very good!'))
def _get_uname():
    """Return the output of ``uname -a`` on the current host (best effort)."""
    with fab_settings(warn_only=True):
        return run('uname -a')
def _runner(*args, **kwargs):
    """Invoke the wrapped function, mapping fabric failures to node errors.

    Aborts become CommandRunError (via ``abort_exception``); network-level
    failures are re-raised as NodeConnectionError.
    """
    try:
        with fab_settings(abort_exception=CommandRunError):
            return func(*args, **kwargs)
    except NetworkError as exc:
        raise NodeConnectionError(str(exc.args))
def configure_stack(goldstone_addr=None, restart_services=None, accept=False):
    """Configures syslog and ceilometer parameters on OpenStack hosts.

    :param goldstone_addr: Goldstone server's hostname or IP accessible to
                           OpenStack hosts
    :type goldstone_addr: str
    :param restart_services: After completion, do you want to restart
                             openstack?
    :type restart_services: boolean
    :param accept: Do you understand that this will change your openstack
                   and syslog configs?
    :type accept: boolean

    BUG FIX: the completion message was misspelled ("Finshed"); it now reads
    "Finished".
    """
    import arrow
    if not accept:
        accepted = prompt(cyan(
            "This utility will modify configuration files on the hosts\n"
            "supplied via the -H parameter, and optionally restart\n"
            "OpenStack and syslog services.\n\n"
            "Do you want to continue (yes/no)?"),
            default='yes', validate='yes|no')
    else:
        accepted = 'yes'
    if accepted != 'yes':
        return 0
    # Prompt for any parameters not supplied on the command line.
    if restart_services is None:
        restart_services = prompt(cyan("Restart OpenStack and syslog services "
                                       "after configuration changes(yes/no)?"),
                                  default='yes', validate='yes|no')
    if goldstone_addr is None:
        goldstone_addr = prompt(
            cyan("Goldstone server's hostname or IP "
                 "accessible to OpenStack hosts?"))
    # One shared timestamp so every backup file from this run matches.
    backup_timestamp = arrow.utcnow().timestamp
    with fab_settings(warn_only=True, user="******"):
        _configure_rsyslog(backup_timestamp, goldstone_addr,
                           restart=restart_services)
        _configure_ceilometer(backup_timestamp, goldstone_addr,
                              restart=restart_services)
        _configure_nova(backup_timestamp, restart=restart_services)
        _configure_neutron(backup_timestamp, restart=restart_services)
        _configure_cinder(backup_timestamp, restart=restart_services)
        _configure_glance(backup_timestamp, restart=restart_services)
        _configure_keystone(backup_timestamp, restart=restart_services)
        print(green("\nFinished"))
def get_supervised_procnames():
    """Return a list of procnames under supervisor."""
    with fab_settings(hide('stdout')):
        status_output = supervisorctl('status')
        return _get_procnames_from_output(status_output)
def get_children(pid):
    """Return the command lines of *pid*'s direct children.

    The first line of ``ps`` output is its header, so it is dropped.
    """
    with fab_settings(hide('warnings', 'stdout')):
        output = run('ps --ppid {} -o command'.format(pid), warn_only=True)
        return output.splitlines()[1:]
def supervisorctl(cmd):
    """Run a supervisorctl subcommand via sudo, bounded by a timeout."""
    with fab_settings(command_timeout=SUPERVISORCTL_TIMEOUT):
        return sudo('supervisorctl {}'.format(cmd))
def uninstall():
    """
    Removes the app from an environment.
    """
    require('app_path', provided_by=env.available_environments)
    cleanup_cmd = 'rm -rf %(app_path)s/app && rm -rf %(app_path)s/venv' % env
    # warn_only: a partially-installed app should not abort the task.
    with fab_settings(warn_only=True):
        run(cleanup_cmd)