def get_metadata(idp_fqdn=FQDN):
    """ Download the metadata.xml file into /etc/ssl """
    runcmd('wget --no-check-certificate '
           'https://%(idp_fqdn)s/idp/saml2/metadata -O- > '
           '/etc/ssl/saml-%(idp_fqdn)s-metadata.xml' % {'idp_fqdn': idp_fqdn})
def checkout_layer_branch(layerbranch, repodir, logger=None):
    if layerbranch.actual_branch:
        branchname = layerbranch.actual_branch
    else:
        branchname = layerbranch.branch.name
    out = utils.runcmd("git checkout origin/%s" % branchname, repodir, logger=logger)
    out = utils.runcmd("git clean -f -x", repodir, logger=logger)
def add_mod_mellon():
    """
    Enable mod auth_mellon
    (Note: this module is currently built on the server)
    """
    mellon.install_build_deps()
    mellon.build_mellon()
    mellon.install_mellon()
    runcmd('a2enmod auth_mellon')
def create_user():
    """ Create the git user """
    with settings(warn_only=True):
        runcmd('adduser '
               '--home={home} --disabled-password '
               '--gecos "" '
               '{username}'.format(username=GIT_USER, home=GIT_HOME))
def install():
    """
    Install the git package,
    create the git user,
    create git's home directory
    """
    runcmd('apt-get -y install git')
    create_user()
    with settings(warn_only=True):
        sudo('mkdir %s' % GIT_LOG)
def install_deb_toolkit():
    """ Toolkit for building deb packages """
    runcmd('apt-get install -y '
           'devscripts '
           'dpkg-dev '
           'dput '
           'fakeroot '
           'debhelper '
           'build-essential')
def mpi_compile(username, type, src_path, exe_path='a.out'):
    """
    Compile the source code.

    The source will be compiled by
    ``mpicc -Wall -Wextra -O3 $src_path -o $exe_path -lm``
    (if type is 'cpp', use mpic++ instead)

    :param username: Name of the user.
    :param type: Source type (``c`` or ``cpp``).
    :param src_path: Source path.
    :param exe_path: Outfile path. (default: ``a.out``).
    :rtype: ``{'stdout': string of stdout, 'stderr': string of stderr}``
    """
    ret = {'status': 100, 'data': {}}
    nova = stack_api.get_nova('dkg')
    server = stack_api.get_server_by_name(nova, username)
    host = server.addresses['CloudOS_2013'][0]['addr']

    # compile
    cmd = (
        '{cc} -Wall -Wextra -O3 {src_path} -o {exe_path} -lm'
        ' > /tmp/stdout 2> /tmp/stderr'
    ).format(
        cc='mpic++' if type == 'cpp' else 'mpicc',
        src_path=src_path,
        exe_path=exe_path,
    )
    try:
        msg = utils.runcmd_on_host(host, cmd)
    except CalledProcessError:
        # We assume the command itself is always well-formed; a non-zero exit
        # status here usually just means a compile error, which is collected
        # from the output files below.
        pass

    # get result
    stdout = '/tmp/stdout-' + utils.gen_rand_str(6)
    stderr = '/tmp/stderr-' + utils.gen_rand_str(6)
    try:
        msg = utils.scp_from_server(host, '/tmp/stdout', stdout)
        msg = utils.scp_from_server(host, '/tmp/stderr', stderr)
    except CalledProcessError as e:
        ret['status'] = 500
        ret['data'] = str(e)
        return ret

    # prepare for return
    ret['data']['stdout'] = open(stdout).read()
    ret['data']['stderr'] = open(stderr).read()
    utils.runcmd(['rm', '-f', stdout, stderr])
    ret['status'] = 200
    return ret
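# Hypothetical usage sketch for mpi_compile above: compile a C source on the
# instance named after the user and inspect the captured compiler output.
# The username and paths are illustrative assumptions; the OpenStack helpers
# (stack_api, utils.runcmd_on_host, utils.scp_from_server) are assumed configured.
result = mpi_compile('alice', 'c', '/home/alice/hello.c', exe_path='/home/alice/hello')
if result['status'] == 200:
    print(result['data']['stderr'])  # compiler warnings/errors, empty on a clean build
else:
    print(result['data'])            # transfer error message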
def create_user(user):
    """
    Create a MySQL user *user* whose password is identical to *user*
    """
    with settings(warn_only=True):
        runcmd('mysql --user={root} --password={password} '
               '--execute="CREATE USER '
               '\'{user}\' IDENTIFIED BY \'{userpass}\'"'
               .format(root=MYSQL_ROOT_USER,
                       password=MYSQL_ROOT_PASSWORD,
                       user=user,
                       userpass=user))
def grant_user(db):
    """
    Grant all permissions on the *db* database to the *db* user.
    """
    runcmd('mysql --user={root} --password={password} '
           '--execute="GRANT ALL ON {database}.* TO '
           '\'{user}\'@\'127.0.0.1\' IDENTIFIED BY \'{userpass}\'"'
           .format(root=MYSQL_ROOT_USER,
                   password=MYSQL_ROOT_PASSWORD,
                   database=db,
                   user=db,
                   userpass=db))
def doubleClickItem(self, row, col):
    if col == 0:
        s = QtCore.QSettings()
        diff = str(s.value('diff').toString())
        if diff:
            #xterm = (s.value('xterm') == 'True')
            cmdlist = diff.split(' ')
            filename = self.fileTable.item(row, col).text()
            #cmdlist.append("git diff {} ; echo Press Enter to close ; read".format(filename))
            cmdlist.append(filename)
            #print cmdlist
            utils.runcmd(self.root, cmdlist)
def create_database(db):
    """ Create a database named *db* """
    with settings(warn_only=True):
        runcmd('mysql '
               '--user={root} '
               '--password={password} '
               '--execute="create database {database}"'
               .format(root=MYSQL_ROOT_USER,
                       password=MYSQL_ROOT_PASSWORD,
                       database=db))
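# Hypothetical usage sketch: the MySQL helpers above (create_database,
# create_user, grant_user) are written so that the database name doubles as
# user name and password; mysql.setup_db used in the IdP install below
# presumably wraps a sequence like this. The db name is illustrative.
db = 'idp_example_org'
create_database(db)
create_user(db)
grant_user(db)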
def setup_with_mellon(fqdn, wsgi, vhost_tpl="django_mellon_vhost.txt"):
    """
    Set up a SAML service provider (mod_mellon) for *fqdn*: create the SSL
    certificates, fetch the IdP metadata, register the SP and add the
    Apache vhost.
    """
    ssl.create_certificats(fqdn)
    idp.get_metadata()
    idp.register_sp(fqdn)
    extra = {
        'idp_fqdn': idp.FQDN,
        'wsgi': wsgi,
    }
    with(apache.get_home_sites()):
        runcmd('ln -s /vagrant %s' % fqdn)
    apache.add_vhost(fqdn, vhost_tpl, extra)
def bulk_change_patch_view(request, pk):
    import os
    import os.path
    import utils
    changeset = get_object_or_404(RecipeChangeset, pk=pk)
    # FIXME this couples the web server and machine running the update script together,
    # but given that it's a separate script the way is open to decouple them in future
    try:
        ret = utils.runcmd('python bulkchange.py %d %s' % (int(pk), settings.TEMP_BASE_DIR),
                           os.path.dirname(__file__))
        if ret:
            fn = ret.splitlines()[-1]
            if os.path.exists(fn):
                if fn.endswith('.tar.gz'):
                    mimetype = 'application/x-gzip'
                else:
                    mimetype = 'text/x-diff'
                response = HttpResponse(mimetype=mimetype)
                response['Content-Disposition'] = 'attachment; filename="%s"' % os.path.basename(fn)
                with open(fn, "rb") as f:
                    data = f.read()
                response.write(data)
                os.remove(fn)
                return response
        return HttpResponse('No patch data generated', content_type='text/plain')
    except Exception as e:
        output = getattr(e, 'output', None)
        if output:
            if 'timeout' in output:
                return HttpResponse('Failed to generate patches: timed out waiting for lock. Please try again shortly.',
                                    content_type='text/plain')
        return HttpResponse('Failed to generate patches: %s' % e, content_type='text/plain')
def add_vhost(fqdn, filename, extra_context={}):
    """
    Create a vhost from the *filename* template for the *fqdn* project.

    *project* and *project_path* are available in the template, and you can
    add more variables with *extra_context*.
    """
    filename = os.path.join(TPL_DIR, filename)
    destination = os.path.join(APACHE_VHOST_DIR, fqdn)
    project_path = os.path.join(get_home_sites(), fqdn)
    data = {'project': fqdn,
            'project_path': project_path, }
    data.update(extra_context)
    upload_template(
        filename,
        destination,
        context=data,
        use_sudo=True)
    runcmd('a2ensite %s' % fqdn)
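# Hypothetical usage sketch for add_vhost above: render an assumed template
# "example_vhost.txt" from TPL_DIR for a project and pass one extra template
# variable on top of the provided *project* and *project_path*.
add_vhost('www.example.org', 'example_vhost.txt',
          extra_context={'wsgi': '/var/www/sites/www.example.org/app.wsgi'})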
def fetch_repo(vcs_url, repodir, urldir, fetchdir, layer_name):
    logger.info("Fetching remote repository %s" % vcs_url)
    try:
        if not os.path.exists(repodir):
            utils.runcmd(['git', 'clone', vcs_url, urldir], fetchdir, logger=logger, printerr=False)
        else:
            utils.runcmd(['git', 'fetch', '-p'], repodir, logger=logger, printerr=False)
        return (vcs_url, None)
    except subprocess.CalledProcessError as e:
        logger.error("Fetch of layer %s failed: %s" % (layer_name, e.output))
        return (vcs_url, e.output)
def cf_token():
    ''' Log in and return the Cloud Foundry OAuth token for the session. '''
    data = ''
    try:
        cf_login()
        cmd = '/home/vcap/app/cf oauth-token'
        rc, data, err = runcmd(cmd)
    except Exception as e:
        print("[ERROR] {}".format(e))
    finally:
        return "".join(data.split('\n'))
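# Hypothetical usage sketch: attach the token returned by cf_token() above to
# a Cloud Foundry API call. The requests library and the /v2/apps endpoint are
# assumptions, not part of the original module; cf_api_endpoint is the
# module-level setting used by cf_login() below.
import requests

token = cf_token()
resp = requests.get('{0}/v2/apps'.format(cf_api_endpoint),
                    headers={'Authorization': token},
                    verify=False)  # matches --skip-ssl-validation used by the cf CLI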
def _get_version():
    try:
        stdout, stderr, rt = runcmd(['git', 'describe'])
    except RuntimeError as e:
        if str(e).startswith('fatal: No names found, cannot describe anything.'):
            return 'git-' + get_githead()[:8]
        raise
    except OSError:
        # e.g. not a git repo
        return "Unknown"
    return stdout
def cf_login():
    ''' Log in to the Cloud Foundry API endpoint as an admin user. '''
    rc = None
    try:
        cmd = ('/home/vcap/app/cf login -a {0} --skip-ssl-validation'
               ' -u {1} -p {2} -o system -s system').format(
            cf_api_endpoint, cf_user, cf_password)
        rc, data, err = runcmd(cmd)
    except Exception as e:
        print("[ERROR] {}".format(e))
    finally:
        return rc
def get_reviews(cmd, filt, value):
    stdout = utils.runcmd(cmd)

    reviews = []
    for line in stdout.split('\n'):
        if not line:
            continue

        try:
            packet = json.loads(line)
            if filt(packet, value):
                reviews.append(packet)
        except ValueError as e:
            print 'Could not decode:'
            print '    %s' % line
            print '    Error: %s' % e

    return reviews
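# Hypothetical usage sketch for get_reviews above: run a gerrit
# "query --format JSON" command (one JSON object per line) and keep only the
# packets whose project matches. The ssh host and filter are illustrative
# assumptions, not taken from the original code.
def by_project(packet, value):
    return packet.get('project') == value

nova_reviews = get_reviews(
    'ssh -p 29418 review.opendev.org gerrit query --format JSON '
    'status:open project:openstack/nova',
    by_project, 'openstack/nova')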
def install():
    """ Install the MySQL server with predefined credentials """
    runcmd('debconf-set-selections <<< \'mysql-server-{version} '
           'mysql-server/root_password password {password}\''.format(
               password=MYSQL_ROOT_PASSWORD, version=MYSQL_VERSION))
    runcmd('debconf-set-selections <<< \'mysql-server-{version} '
           'mysql-server/root_password_again password {password}\''.format(
               password=MYSQL_ROOT_PASSWORD, version=MYSQL_VERSION))
    runcmd('apt-get -y install mysql-server')
def add_mod_ssl():
    """ Enable mod ssl """
    runcmd('a2enmod ssl')
def install():
    """ Install the apache2 package """
    runcmd('apt-get -y install apache2')
def add_mod_rewrite():
    """ Enable mod rewrite """
    runcmd('a2enmod rewrite')
def install(idp_fqdn=FQDN):
    """
    Install an identity provider available at *idp_fqdn*.
    """
    # DEPS
    lasso.install()
    mysql.install()
    ssl.install()
    git.install()
    apache.install()
    apache.add_mod_rewrite()
    apache.add_mod_ssl()
    apache.add_mod_wsgi()
    venv.install_virtualenv()
    runcmd('apt-get install -y python-ldap')
    runcmd('apt-get install -y python-mysqldb --force-yes')

    # DB
    db_name = idp_fqdn.replace('.', '_')
    mysql.setup_db(db_name)

    # WEB
    ssl.create_certificats(idp_fqdn)
    extra = {'venv': venv.get_path(idp_fqdn), }
    apache.add_vhost(idp_fqdn, 'idp_vhost.txt', extra)

    # SOURCE
    git.clone('git://git.auf.org/authentic2', idp_fqdn)
    git.checkout(idp_fqdn, 'master')

    # VIRTUALENV
    venv.mkenv(idp_fqdn)
    bin_pip = venv.get_bin_pip(idp_fqdn)
    with cd(git.home(idp_fqdn)):
        git.sudo("%s install -r requirements.txt" % bin_pip)
        git.sudo("%s install django-auth-ldap" % bin_pip)

    # WSGI
    data = {
        'project_path': git.home(idp_fqdn),
        'venv': venv.get_path(idp_fqdn),
    }
    filename = os.path.join(TPL_DIR, 'idp_wsgi.txt')
    destination = os.path.join(venv.get_bin_path(idp_fqdn), 'idp_wsgi.py')
    upload_template(
        filename,
        destination,
        context=data,
        use_sudo=True,)
    runcmd('chown %s:%s %s' % (git.GIT_USER, git.GIT_GROUP, destination,))
    runcmd('chmod 644 %s' % (destination, ))

    # LOG file
    log_file = os.path.join(git.home(idp_fqdn), 'log.log')
    runcmd('touch %s' % log_file)
    runcmd('chmod g+w %s' % log_file)
    runcmd('chown %s:www-data %s' % (git.GIT_USER, log_file))

    # CONF
    data.update({
        'db_name': db_name,
        'db_user': db_name,
        'db_password': db_name,
    })
    filename = os.path.join(TPL_DIR, 'idp_local_settings.txt')
    destination = os.path.join(
        git.home(idp_fqdn), 'aufcustom', 'local_settings.py')
    upload_template(
        filename,
        destination,
        context=data,
        use_sudo=True,)
    runcmd('chown %s:%s %s' % (git.GIT_USER, git.GIT_GROUP, destination,))

    # manage.py
    data.update({
        'venv': venv.get_path(idp_fqdn),
    })
    filename = os.path.join(TPL_DIR, 'idp_manage.txt')
    destination = os.path.join(
        git.home(idp_fqdn), 'manage.py')
    upload_template(
        filename,
        destination,
        context=data,
        use_sudo=True,)
    runcmd('chown %s:%s %s' % (git.GIT_USER, git.GIT_GROUP, destination,))
    runcmd('chmod +x %s' % (destination,))
    git.sudo('%s syncdb --migrate --noinput' % (destination,))
    git.sudo('%s collectstatic --noinput' % (destination,))
    apache.restart()
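# Hypothetical usage sketch (assumes Fabric 1.x, which the sudo/cd/settings/
# upload_template calls above suggest): point Fabric at a target host and run
# the IdP install task for an example hostname.
from fabric.api import env

env.host_string = 'root@idp.example.org'   # illustrative target host
install('idp.example.org')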
def graceful():
    """ Graceful restart """
    runcmd('/etc/init.d/apache2 graceful')
def install():
    """ Install openssl """
    runcmd('apt-get -y install openssl')
def get_list(self):
    li = utils.runcmd('machinectl', ['list-images'])
    return [i['NAME'] for i in li]
def init_parser(settings, branch, bitbakepath, enable_tracking=False, nocheckout=False,
                classic=False, logger=None):
    if not (nocheckout or classic):
        # Check out the branch of BitBake appropriate for this branch and clean out any stale files (e.g. *.pyc)
        if re.match('[0-9a-f]{40}', branch.bitbake_branch):
            # SHA1 hash
            bitbake_ref = branch.bitbake_branch
        else:
            # Branch name
            bitbake_ref = 'origin/%s' % branch.bitbake_branch
        out = utils.runcmd("git checkout %s" % bitbake_ref, bitbakepath, logger=logger)
        out = utils.runcmd("git clean -f -x", bitbakepath, logger=logger)

    # Skip sanity checks
    os.environ['BB_ENV_EXTRAWHITE'] = 'DISABLE_SANITY_CHECKS'
    os.environ['DISABLE_SANITY_CHECKS'] = '1'

    fetchdir = settings.LAYER_FETCH_DIR

    if not classic:
        # Ensure we have OE-Core set up to get some base configuration
        core_layer = utils.get_layer(settings.CORE_LAYER_NAME)
        if not core_layer:
            raise RecipeParseError(
                "Unable to find core layer %s in database; create this layer or set the CORE_LAYER_NAME setting to point to the core layer" % settings.CORE_LAYER_NAME)
        core_layerbranch = core_layer.get_layerbranch(branch.name)
        core_branchname = branch.name
        if core_layerbranch:
            core_subdir = core_layerbranch.vcs_subdir
            if core_layerbranch.actual_branch:
                core_branchname = core_layerbranch.actual_branch
        else:
            core_subdir = 'meta'
        core_urldir = core_layer.get_fetch_dir()
        core_repodir = os.path.join(fetchdir, core_urldir)
        core_layerdir = os.path.join(core_repodir, core_subdir)
        if not nocheckout:
            out = utils.runcmd("git checkout origin/%s" % core_branchname, core_repodir, logger=logger)
            out = utils.runcmd("git clean -f -x", core_repodir, logger=logger)
        if not os.path.exists(os.path.join(core_layerdir, 'conf/bitbake.conf')):
            raise RecipeParseError(
                "conf/bitbake.conf not found in core layer %s - is subdirectory set correctly?" % core_layer.name)
        # The directory above where this script exists should contain our conf/layer.conf,
        # so add it to BBPATH along with the core layer directory
        confparentdir = os.path.abspath(
            os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
        os.environ['BBPATH'] = str("%s:%s" % (confparentdir, core_layerdir))

    # Change into a temporary directory so we don't write the cache and other files to the current dir
    if not os.path.exists(settings.TEMP_BASE_DIR):
        os.makedirs(settings.TEMP_BASE_DIR)
    tempdir = tempfile.mkdtemp(dir=settings.TEMP_BASE_DIR)
    saved_cwd = os.getcwd()
    os.chdir(tempdir)

    tinfoil = utils.setup_tinfoil(bitbakepath, enable_tracking)

    os.chdir(saved_cwd)

    # Ensure TMPDIR exists (or insane.bbclass will blow up trying to write to the QA log)
    oe_tmpdir = tinfoil.config_data.getVar('TMPDIR', True)
    if not os.path.exists(oe_tmpdir):
        os.makedirs(oe_tmpdir)

    # Ensure BBFILES as an initial value so that the old mode of BBFILES := "${BBFILES} ..." works
    if not tinfoil.config_data.getVar('BBFILES', False):
        tinfoil.config_data.setVar('BBFILES', '')

    return (tinfoil, tempdir)
def enable(self, name):
    utils.runcmd('systemctl', ['enable', name])
def init_parser(settings, branch, bitbakepath, enable_tracking=False, nocheckout=False,
                classic=False, logger=None):
    if not (nocheckout or classic):
        # Check out the branch of BitBake appropriate for this branch and clean out any stale files (e.g. *.pyc)
        if re.match("[0-9a-f]{40}", branch.bitbake_branch):
            # SHA1 hash
            bitbake_ref = branch.bitbake_branch
        else:
            # Branch name
            bitbake_ref = "origin/%s" % branch.bitbake_branch
        out = utils.runcmd("git checkout %s" % bitbake_ref, bitbakepath, logger=logger)
        out = utils.runcmd("git clean -f -x", bitbakepath, logger=logger)

    # Skip sanity checks
    os.environ["BB_ENV_EXTRAWHITE"] = "DISABLE_SANITY_CHECKS"
    os.environ["DISABLE_SANITY_CHECKS"] = "1"

    fetchdir = settings.LAYER_FETCH_DIR

    if not classic:
        # Ensure we have OE-Core set up to get some base configuration
        core_layer = utils.get_layer(settings.CORE_LAYER_NAME)
        if not core_layer:
            raise RecipeParseError(
                "Unable to find core layer %s in database; create this layer or set the CORE_LAYER_NAME setting to point to the core layer" % settings.CORE_LAYER_NAME
            )
        core_layerbranch = core_layer.get_layerbranch(branch.name)
        core_branchname = branch.name
        if core_layerbranch:
            core_subdir = core_layerbranch.vcs_subdir
            if core_layerbranch.actual_branch:
                core_branchname = core_layerbranch.actual_branch
        else:
            core_subdir = "meta"
        core_urldir = core_layer.get_fetch_dir()
        core_repodir = os.path.join(fetchdir, core_urldir)
        core_layerdir = os.path.join(core_repodir, core_subdir)
        if not nocheckout:
            out = utils.runcmd("git checkout origin/%s" % core_branchname, core_repodir, logger=logger)
            out = utils.runcmd("git clean -f -x", core_repodir, logger=logger)
        if not os.path.exists(os.path.join(core_layerdir, "conf/bitbake.conf")):
            raise RecipeParseError(
                "conf/bitbake.conf not found in core layer %s - is subdirectory set correctly?" % core_layer.name
            )
        # The directory above where this script exists should contain our conf/layer.conf,
        # so add it to BBPATH along with the core layer directory
        confparentdir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
        os.environ["BBPATH"] = str("%s:%s" % (confparentdir, core_layerdir))

    # Change into a temporary directory so we don't write the cache and other files to the current dir
    if not os.path.exists(settings.TEMP_BASE_DIR):
        os.makedirs(settings.TEMP_BASE_DIR)
    tempdir = tempfile.mkdtemp(dir=settings.TEMP_BASE_DIR)
    os.chdir(tempdir)

    tinfoil = _setup_tinfoil(bitbakepath, enable_tracking)

    # Ensure TMPDIR exists (or insane.bbclass will blow up trying to write to the QA log)
    oe_tmpdir = tinfoil.config_data.getVar("TMPDIR", True)
    if not os.path.exists(oe_tmpdir):
        os.makedirs(oe_tmpdir)

    # Ensure BBFILES as an initial value so that the old mode of BBFILES := "${BBFILES} ..." works
    if not tinfoil.config_data.getVar("BBFILES", False):
        tinfoil.config_data.setVar("BBFILES", "")

    return (tinfoil, tempdir)
def main():
    valid_layer_name = re.compile('[-\w]+$')

    parser = optparse.OptionParser(
        usage="""
    %prog [options] <url> [name]""")

    utils.setup_django()
    layer_type_help, layer_type_choices = get_layer_type_choices()

    parser.add_option("-s", "--subdir",
            help="Specify subdirectory",
            action="store", dest="subdir")
    parser.add_option("-t", "--type",
            help=layer_type_help,
            choices=layer_type_choices,
            action="store", dest="layer_type", default='')
    parser.add_option("-n", "--dry-run",
            help="Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
            help="Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
    parser.add_option("", "--github-auth",
            help="Specify github username:password",
            action="store", dest="github_auth")
    parser.add_option("-q", "--quiet",
            help="Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")
    parser.add_option("-a", "--actual-branch",
            help="Set actual branch",
            action="store", dest="actual_branch")

    options, args = parser.parse_args(sys.argv)
    if len(args) < 2:
        print("Please specify URL of repository for layer")
        sys.exit(1)

    layer_url = args[1]

    if len(args) > 2:
        layer_name = args[2]
    else:
        if options.subdir:
            layer_name = options.subdir
        else:
            layer_name = [x for x in layer_url.split('/') if x][-1]
            if layer_name.endswith('.git'):
                layer_name = layer_name[:-4]

    if not valid_layer_name.match(layer_name):
        logger.error(
            'Invalid layer name "%s" - Layer name can only include letters, numbers and dashes.', layer_name)
        sys.exit(1)

    if options.github_auth:
        if not ':' in options.github_auth:
            logger.error('--github-auth value must be specified as username:password')
            sys.exit(1)
        splitval = options.github_auth.split(':')
        github_login = splitval[0]
        github_password = splitval[1]
    else:
        github_login = None
        github_password = None

    import settings
    from layerindex.models import LayerItem, LayerBranch, LayerDependency, LayerMaintainer
    from django.db import transaction

    logger.setLevel(options.loglevel)

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)

    master_branch = utils.get_branch('master')
    core_layer = None
    try:
        with transaction.atomic():
            # Fetch layer
            logger.info('Fetching repository %s' % layer_url)

            layer = LayerItem()
            layer.name = layer_name
            layer.status = 'P'
            layer.summary = 'tempvalue'
            layer.description = layer.summary

            set_vcs_fields(layer, layer_url)

            urldir = layer.get_fetch_dir()
            repodir = os.path.join(fetchdir, urldir)
            out = None
            try:
                if not os.path.exists(repodir):
                    out = utils.runcmd("git clone %s %s" % (layer.vcs_url, urldir), fetchdir, logger=logger)
                else:
                    out = utils.runcmd("git fetch", repodir, logger=logger)
            except Exception as e:
                logger.error("Fetch failed: %s" % str(e))
                sys.exit(1)

            actual_branch = 'master'
            if (options.actual_branch):
                actual_branch = options.actual_branch

            try:
                out = utils.runcmd("git checkout origin/%s" % actual_branch, repodir, logger=logger)
            except subprocess.CalledProcessError:
                actual_branch = None
                branches = utils.runcmd("git branch -r", repodir, logger=logger)
                for line in branches.splitlines():
                    if 'origin/HEAD ->' in line:
                        actual_branch = line.split('-> origin/')[-1]
                        break
                if not actual_branch:
                    logger.error("Repository has no master branch nor origin/HEAD")
                    sys.exit(1)
                out = utils.runcmd("git checkout origin/%s" % actual_branch, repodir, logger=logger)

            layer_paths = []
            if options.subdir:
                layerdir = os.path.join(repodir, options.subdir)
                if not os.path.exists(layerdir):
                    logger.error(
                        "Subdirectory %s does not exist in repository for master branch" % options.subdir)
                    sys.exit(1)
                if not os.path.exists(os.path.join(layerdir, 'conf/layer.conf')):
                    logger.error(
                        "conf/layer.conf not found in subdirectory %s" % options.subdir)
                    sys.exit(1)
                layer_paths.append(layerdir)
            else:
                if os.path.exists(os.path.join(repodir, 'conf/layer.conf')):
                    layer_paths.append(repodir)
                # Find subdirs with a conf/layer.conf
                for subdir in os.listdir(repodir):
                    subdir_path = os.path.join(repodir, subdir)
                    if os.path.isdir(subdir_path):
                        if os.path.exists(os.path.join(subdir_path, 'conf/layer.conf')):
                            layer_paths.append(subdir_path)
                if not layer_paths:
                    logger.error(
                        "conf/layer.conf not found in repository or first level subdirectories - is subdirectory set correctly?")
                    sys.exit(1)

            if 'github.com' in layer.vcs_url:
                json_data, owner_json_data = get_github_layerinfo(
                    layer.vcs_url, github_login, github_password)

            for layerdir in layer_paths:
                layer.pk = None
                if layerdir != repodir:
                    subdir = os.path.relpath(layerdir, repodir)
                    if len(layer_paths) > 1:
                        layer.name = subdir
                else:
                    subdir = ''
                if LayerItem.objects.filter(name=layer.name).exists():
                    if LayerItem.objects.filter(name=layer.name).exclude(vcs_url=layer.vcs_url).exists():
                        conflict_list = LayerItem.objects.filter(name=layer.name).exclude(vcs_url=layer.vcs_url)
                        conflict_list_urls = []
                        for conflict in conflict_list:
                            conflict_list_urls.append(conflict.vcs_url)
                        cln = ', '.join(conflict_list_urls)
                        logger.error(
                            'A layer named "%s" already exists in the database. Possible name collision with %s.vcs_url = %s'
                            % (layer.name, layer.name, cln))
                        sys.exit(1)
                    else:
                        logger.info(
                            'The layer named "%s" already exists in the database. Skipping this layer with same vcs_url'
                            % layer.name)
                        layer_paths = [x for x in layer_paths if x != layerdir]
                        continue

                logger.info('Creating layer %s' % layer.name)
                # Guess layer type if not specified
                if options.layer_type:
                    layer.layer_type = options.layer_type
                elif layer.name in ['openembedded-core', 'meta-oe']:
                    layer.layer_type = 'A'
                elif glob.glob(os.path.join(layerdir, 'conf/distro/*.conf')):
                    layer.layer_type = 'D'
                elif glob.glob(os.path.join(layerdir, 'conf/machine/*.conf')):
                    layer.layer_type = 'B'
                else:
                    layer.layer_type = 'M'

                layer.save()
                layerbranch = LayerBranch()
                layerbranch.layer = layer
                layerbranch.branch = master_branch
                if layerdir != repodir:
                    layerbranch.vcs_subdir = subdir
                if actual_branch:
                    layerbranch.actual_branch = actual_branch
                layerbranch.save()

                if layer.name != settings.CORE_LAYER_NAME:
                    if not core_layer:
                        core_layer = utils.get_layer(settings.CORE_LAYER_NAME)
                    if core_layer:
                        logger.debug('Adding dep %s to %s' % (core_layer.name, layer.name))
                        layerdep = LayerDependency()
                        layerdep.layerbranch = layerbranch
                        layerdep.dependency = core_layer
                        layerdep.save()

                layerconfparser = LayerConfParse(logger=logger)
                try:
                    config_data = layerconfparser.parse_layer(layerdir)
                    if config_data:
                        utils.add_dependencies(layerbranch, config_data, logger=logger)
                        utils.add_recommends(layerbranch, config_data, logger=logger)
                finally:
                    layerconfparser.shutdown()

                # Get some extra meta-information
                readme_files = glob.glob(os.path.join(layerdir, 'README*'))
                if (not readme_files) and subdir:
                    readme_files = glob.glob(os.path.join(repodir, 'README*'))
                maintainer_files = glob.glob(os.path.join(layerdir, 'MAINTAINERS'))
                if (not maintainer_files) and subdir:
                    maintainer_files = glob.glob(os.path.join(repodir, 'MAINTAINERS'))

                maintainers = []
                if readme_files:
                    (desc, maintainers, deps) = readme_extract(readme_files[0])
                    if desc:
                        layer.summary = layer.name
                        layer.description = desc
                if maintainer_files:
                    maintainers.extend(maintainers_extract(maintainer_files[0]))

                if (not maintainers) and 'github.com' in layer.vcs_url:
                    if json_data:
                        layer.summary = json_data['description']
                        layer.description = layer.summary
                    if owner_json_data:
                        owner_name = owner_json_data.get('name', None)
                        owner_email = owner_json_data.get('email', None)
                        if owner_name and owner_email:
                            maintainers.append('%s <%s>' % (owner_name, owner_email))

                if layer.name == 'openembedded-core':
                    layer.summary = 'Core metadata'
                elif layer.name == 'meta-oe':
                    layer.summary = 'Additional shared OE metadata'
                    layer.description = layer.summary

                if maintainers:
                    maint_re = re.compile(r'^"?([^"@$<>]+)"? *<([^<> ]+)>[ -]*(.+)?$')
                    for maintentry in maintainers:
                        res = maint_re.match(maintentry)
                        if res:
                            maintainer = LayerMaintainer()
                            maintainer.layerbranch = layerbranch
                            maintainer.name = res.group(1).strip()
                            maintainer.email = res.group(2)
                            if res.group(3):
                                maintainer.responsibility = res.group(3).strip()
                            maintainer.save()

                layer.save()

            if not layer_paths:
                logger.error('No layers added.')
                sys.exit(1)

            if options.dryrun:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        pass

    sys.exit(0)
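# Hypothetical usage sketch: the options parsed in main() above imply command
# lines along these lines (repository URL required, layer name optional);
# the URLs and layer names are illustrative placeholders.
#
#   python import_layer.py git://git.example.com/meta-example
#   python import_layer.py -s meta-sublayer -a master \
#       git://git.example.com/combined-repo meta-sublayer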
def get_list(self):
    li = utils.runcmd('systemctl', ['--all', '--no-pager'])
    return [i['UNIT'].replace(b'\x8f ', b'') for i in li]
def install():
    """ Install the python-lasso package """
    runcmd('apt-get install -y python-lasso')
def add_mod_wsgi():
    """ Enable mod wsgi """
    runcmd('apt-get -y install libapache2-mod-wsgi')
    runcmd('a2enmod wsgi')
def create_certificats(project):
    """
    Create a certificate and a private key:

    key:         /etc/ssl/private/saml-*project*-key.pem
    certificate: /etc/ssl/certs/saml-*project*-cert.pem
    """
    key = '/etc/ssl/private/saml-%s-key.pem' % project
    cert = '/etc/ssl/certs/saml-%s-cert.pem' % project
    if (files.exists(key, use_sudo=True)
            and files.exists(cert, use_sudo=True)):
        return
    runcmd("openssl req -new -x509 -keyout key.pem "
           "-out cert.pem -nodes -days 3650 -newkey rsa:2048 "
           "-subj '/CN=%s'" % project)
    runcmd('chmod 0600 key.pem')
    runcmd('chmod 0644 cert.pem')
    runcmd('chown root:root key.pem cert.pem')
    runcmd('mv key.pem %s' % key)
    runcmd('mv cert.pem %s' % cert)
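# Hypothetical usage sketch for create_certificats above: generate (or reuse)
# the SAML key pair for a service provider host before wiring it into the
# Apache/mellon configuration. The hostname is illustrative.
create_certificats('sp.example.org')
# -> /etc/ssl/private/saml-sp.example.org-key.pem
# -> /etc/ssl/certs/saml-sp.example.org-cert.pem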
def stop(self, name):
    utils.runcmd('systemctl', ['stop', name])
def install_virtualenv():
    """ Install virtualenv """
    runcmd('apt-get install -y python-virtualenv --force-yes')
def restart():
    """ Hard restart """
    runcmd('/etc/init.d/apache2 restart')
def add_layer(self, layer):
    self.logger.debug("Processing layer %s" % layer)
    try:
        git_dir = utils.runcmd("git rev-parse --show-toplevel",
                               destdir=layer, logger=self.logger)
    except Exception as e:
        self.logger.error("Cannot get root dir for layer %s: %s - Skipping." % (layer, str(e)))
        return 1

    layer_name = layer.split('/')[-2]
    layer_subdir = None
    if os.path.basename(git_dir) != layer_name:
        layer_subdir = layer_name

    layer_name = self.get_layer_name(layer)

    actual_branch = None
    for i in [1, 2, 3]:
        remote = utils.runcmd("git remote", destdir=git_dir, logger=self.logger)
        if not remote:
            self.logger.warning("Cannot find remote git for %s" % layer_name)
            return 1

        try:
            git_url = utils.runcmd("git config --get remote.%s.url" % remote,
                                   destdir=git_dir, logger=self.logger)
        except Exception as e:
            self.logger.info("Cannot get remote.%s.url for git dir %s: %s" % (remote, git_dir, str(e)))

        if not os.path.exists(git_url):
            # Assume this is remote.
            self.logger.debug("Found git url = %s" % git_url)
            remote_branch = utils.runcmd("git rev-parse --abbrev-ref --symbolic-full-name @\{u\}",
                                         destdir=git_dir, logger=self.logger)
            if remote_branch.startswith(remote):
                actual_branch = remote_branch[len(remote) + 1:]
            break

        self.logger.debug("Iterating to find git url into %s" % git_dir)
        git_dir = git_url

    if not git_url:
        self.logger.warning("Cannot find layer %s git url" % layer)
        return 1

    cmd = ['import_layer.py']
    if self.options.loglevel == logging.DEBUG:
        cmd.append("-d")
    if layer_subdir:
        cmd.append("-s")
        cmd.append(layer_subdir)
    if actual_branch:
        cmd.append("-a")
        cmd.append(actual_branch)
    cmd.append(git_url)
    cmd.append(layer_name)
    prefix = "Calling"
    if self.options.dryrun:
        prefix = "Would Call"
    self.logger.info("%s import_layer.main with %s for dir %s" % (prefix, str(cmd), layer))
    sys.argv = cmd
    if not self.options.dryrun:
        try:
            import_layer.main()
        except SystemExit as see:
            return see.code
    return 0
def get_list(self):
    """List unit files"""
    li = utils.runcmd('systemctl', ['list-unit-files', '--all', '--no-pager'])
    return [i['UNIT FILE'] for i in li]
def restart(self, name):
    utils.runcmd('systemctl', ['restart', name])
print_heading('Uncategorized')
for review in uncategorized_reviews:
    print_review(review, '')


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--username', default='mikalstill',
                        help='Your gerrit username')
    ARGS = parser.parse_args()

    with open(os.path.expanduser('~/.reviewtargets')) as f:
        CONFIG = json.loads(f.read())

    print utils.runcmd('cd ~/cache/nova-specs; '
                       'git checkout master; '
                       'git pull')

    for ent in os.listdir(os.path.expanduser('~/cache/nova-specs'
                                             '/specs/%s' % RELEASE)):
        if not ent.endswith('.rst'):
            continue
        APPROVED_SPECS.append(ent[:-4])

    possible = reviews.component_reviews('openstack/nova-specs')
    for review in filter_obvious(possible):
        try:
            bp_name = review.get('topic', 'bp/nosuch').split('/')[1]
        except:
            bp_name = review.get('topic', '')
def disable(self, name):
    utils.runcmd('systemctl', ['disable', name])