def issue_cert(self, hostname=system.fqdn, hash="1024", key_prv=None,
               key_pub=None):
    """Issues a cert.

    hostname: CN value.
    key_prv: Alternate path to store the certificate's private key.
    key_pub: Alternate path to store the certificate's public key.
    """
    with lcd(self.workspace):
        runcmd(("openssl req -newkey rsa:%s -nodes -sha1 -keyout "
                "cert.key -keyform PEM -out cert.req -outform PEM "
                "-subj '/DC=%s/DC=%s/CN=%s'"
                % (hash, self.domain_comp_country, self.domain_comp,
                   hostname)))
        runcmd(("openssl x509 -req -in cert.req -CA ca.pem -CAkey ca.key "
                "-CAcreateserial -out cert.crt -days 1"))
        if key_prv:
            runcmd("cp cert.key %s" % key_prv)
            info("Private key stored in '%s'." % key_prv)
        if key_pub:
            runcmd("cp cert.crt %s" % key_pub)
            info("Public key stored in '%s'." % key_pub)
def disable_repo(self, repolist):
    for repo in to_list(repolist):
        r = self.client.disable_repo(repo)
        if r.failed:
            api.fail("Could not disable repo '%s'" % repo)
        else:
            api.info("Repository '%s' disabled" % repo)
def _run(self):
    logfile = os.path.join(config.CFG["log_path"], "qc_conf.stderr")
    module_path = utils.runcmd(
        "puppet config print modulepath",
        envvars=[("PATH", "$PATH:/opt/puppetlabs/bin")],
        nosudo=True,
        stop_on_error=False)
    if module_path:
        self.module_path = ':'.join([self.module_path, module_path])
    cmd = ("%s apply --verbose --debug --modulepath %s %s "
           "--detailed-exitcodes") % (self.puppet_bin,
                                      self.module_path,
                                      self.manifest)
    r = utils.runcmd(cmd,
                     os.getcwd(),
                     log_to_file="qc_conf",
                     stop_on_error=False,
                     nosudo=True)
    if r.return_code == 0:
        api.info("Puppet execution ended successfully.")
    elif r.return_code == 2:
        api.info(("Puppet execution ended successfully (changes were "
                  "applied)"))
        r.failed = False
    else:
        api.fail("Puppet execution failed. More information on %s log"
                 % logfile,
                 stop_on_error=True)
        r.failed = True
    return r
def run(self, qc_step):
    self.pre_config()

    self.nodetype = butils.to_list(self.nodetype)
    self.siteinfo = butils.to_list(self.siteinfo)
    if not self.nodetype or not self.siteinfo:
        raise exception.ConfigException(("Could not run YAIM: Bad "
                                         "nodetype or site-info."))

    with tempfile.NamedTemporaryFile("w+t",
                                     dir=self.config_path,
                                     delete=True) as f:
        for si in self.siteinfo:
            f.write("source %s\n" % si)
        f.flush()
        info(("Creating temporary file '%s' with "
              "content: %s" % (f.name, f.readlines())))

        # NOTE(orviz) Cannot use 'capture=True': execution gets
        # stalled (defunct)
        with lcd(self.config_path):
            abort_exception_default = env.abort_exception
            env.abort_exception = exception.ConfigException
            try:
                local("/opt/glite/yaim/bin/yaim -c -s %s -n %s"
                      % (f.name, " -n ".join(self.nodetype)))
            except exception.ConfigException:
                fail(("YAIM execution failed. Check the logs at "
                      "'/opt/glite/yaim/log/yaimlog'."))
            info("YAIM configuration ran successfully.")
            env.abort_exception = abort_exception_default

    self.post_config()
def _run_checks(self, qc_step, config):
    """Runs the checks received."""
    failed_checks = []
    for check in self._get_checklist(config):
        description, user, f, args = check
        info("Probe '%s'" % description)
        cmd = "./%s" % " ".join([f, args])
        if user:
            cmd = ' '.join(["su %s -c" % user, cmd])

        cmd_failed = False
        if not self._is_executable(f):
            result = ("Could not run check '%s': file is not "
                      "executable" % f)
            cmd_failed = True
        else:
            self._handle_user(qc_step, user)
            with shell_env(**self.qc_envvars):
                r = qc_step.runcmd(cmd)
            cmd_failed = r.failed
            result = r

        if cmd_failed:
            fail("Command '%s' failed: %s" % (cmd, result))
            failed_checks.append(cmd)
        else:
            ok("Command '%s' ended OK with result: %s" % (cmd, result))
    return failed_checks
def get_pkglist(self, r):
    """Gets the list of packages being installed parsing yum output."""
    d = {}
    lines = r.split('\n')
    try:
        for line in lines[lines.index("Installed:"):]:
            if line.startswith(' '):
                for pkg in map(None, *([iter(line.split())] * 2)):
                    name, version = pkg
                    name, arch = name.rsplit('.', 1)
                    version = version.split(':')[-1]
                    d[name] = '.'.join(['-'.join([name, version]), arch])
    except ValueError:
        api.info("No new package installed.")

    # Look for already installed packages
    for line in r.split('\n'):
        m = re.search(("Package (matching ){0,1}(.+) already "
                       "installed"), line)
        if m:
            all = m.groups()[-1]
            pattern = "([a-zA-Z0-9-_]+)-\d+.+"
            name = re.search(pattern, all).groups()[0]
            d[name] = ' '.join([all, "(already installed)"])
    return d
def config(self):
    self.nodetype = utils.to_list(self.nodetype)
    self.siteinfo = utils.to_list(self.siteinfo)
    if not self.nodetype or not self.siteinfo:
        raise exception.ConfigException(("Could not run YAIM: Bad "
                                         "nodetype or site-info."))

    with tempfile.NamedTemporaryFile("w+t",
                                     dir=config.CFG["yaim_path"],
                                     delete=True) as f:
        for si in self.siteinfo:
            f.write("source %s\n" % si)
        f.flush()
        api.info(("Creating temporary file '%s' with "
                  "content: %s" % (f.name, f.readlines())))

        # NOTE(orviz) Cannot use 'capture=True': execution gets
        # stalled (defunct)
        with context_managers.lcd(config.CFG["yaim_path"]):
            abort_exception_default = fabric_api.env.abort_exception
            fabric_api.env.abort_exception = exception.ConfigException
            try:
                fabric_api.local("/opt/glite/yaim/bin/yaim -c -s %s -n %s"
                                 % (f.name, " -n ".join(self.nodetype)))
            except exception.ConfigException:
                fabric_api.abort(api.fail(("YAIM execution failed. Check "
                                           "the logs at '/opt/glite/yaim/"
                                           "log/yaimlog'.")))
            api.info("YAIM configuration ran successfully.")
            fabric_api.env.abort_exception = abort_exception_default
def _enable_verification_repo(self, qc_step, url,
                              download_dir="/tmp/repofiles"):
    """Downloads the repofiles found in the given URL."""
    qc_step.runcmd("rm -rf %s/*" % download_dir, fail_check=False)
    qc_step.runcmd("wget -P %s -r --no-parent -R*.html* %s"
                   % (download_dir, url),
                   fail_check=False,
                   stop_on_error=False)

    repofiles = []
    for path in os.walk(download_dir):
        if path[2] and path[0].find(self.pkgtool.get_repodir()) != -1:
            for f in path[2]:
                if f.endswith(self.pkgtool.get_extension()):
                    repofiles.append(os.path.join(path[0], f))

    if repofiles:
        repopath = self.pkgtool.get_path()
        for f in repofiles:
            repofile = os.path.basename(f)
            shutil.copy2(f, os.path.join(repopath, repofile))
            api.info("Verification repository '%s' enabled." % repofile)
    else:
        qc_step.print_result("FAIL",
                             "Could not find any valid '%s' filename."
                             % self.pkgtool.get_extension(),
                             do_abort=True)
def issue_cert(self, hostname=system.fqdn, hash="1024", key_prv=None,
               key_pub=None):
    """Issues a cert.

    hostname: CN value.
    key_prv: Alternate path to store the certificate's private key.
    key_pub: Alternate path to store the certificate's public key.
    """
    with fabric.context_managers.lcd(self.workspace):
        subject = "/DC=%s/DC=%s/CN=%s" % (self.domain_comp_country,
                                          self.domain_comp,
                                          hostname)
        utils.runcmd(("openssl req -newkey rsa:%s -nodes -sha1 -keyout "
                      "cert.key -keyform PEM -out cert.req -outform PEM "
                      "-subj '%s' -config openssl.cnf" % (hash, subject)),
                     chdir=self.workspace)
        utils.runcmd(("openssl x509 -req -in cert.req -CA ca.pem -CAkey "
                      "ca.key -CAcreateserial -extensions v3_req -extfile "
                      "openssl.cnf -out cert.crt -days 1"),
                     chdir=self.workspace)
        if key_prv:
            utils.runcmd("chmod 400 cert.key", chdir=self.workspace)
            utils.runcmd("cp cert.key %s" % key_prv, chdir=self.workspace)
            api.info("Private key stored in '%s' (with 400 perms)."
                     % key_prv)
        if key_pub:
            utils.runcmd("cp cert.crt %s" % key_pub, chdir=self.workspace)
            api.info("Public key stored in '%s'." % key_pub)
    return OwnCACert(subject, key_prv, key_pub)
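# Hedged usage sketch (not part of the original sources): how the CA helper
# above might be driven end to end. The class names and keyword arguments
# mirror the certify() function further down in this listing; the import
# path is an assumption about the module layout and may differ in the real
# tree.
#
#     from umd.base.security import OwnCA  # assumed location
#
#     ca = OwnCA(domain_comp_country="es",
#                domain_comp="UMDverification",
#                common_name="UMDVerificationOwnCA")
#     ca.create(trusted_ca_dir="/etc/grid-security/certificates")
#     cert = ca.issue_cert(hash="2048",
#                          key_prv="/etc/grid-security/hostkey.pem",
#                          key_pub="/etc/grid-security/hostcert.pem")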
def _write(fname, msg):
    dirname = os.path.dirname(fname)
    if not os.path.exists(dirname):
        os.makedirs(dirname)
        api.info("Log directory '%s' has been created." % dirname)
    with open(fname, 'a') as f:
        f.write(msg)
        f.flush()
def add_repo_key(self, keylist):
    for key in keylist:
        r = runcmd("rpm --import %s" % key, stop_on_error=False)
        if r.failed:
            api.fail("Could not add key '%s'" % key)
        else:
            api.info("Repository key added: %s" % key)
def enable_repo(self, repolist, **kwargs):
    if not os.path.exists(self.client.path):
        os.makedirs(self.client.path)
    for repo in to_list(repolist):
        r = self.client.add_repo(repo, **kwargs)
        if r.failed:
            api.fail("Could not add repo '%s'" % repo)
        else:
            api.info("Repository '%s' added" % repo)
def add_repo_key(self, keylist):
    for key in keylist:
        runcmd("wget -q %s -O /tmp/key.key" % key, stop_on_error=False)
        r = runcmd("apt-key add /tmp/key.key", stop_on_error=False)
        if r.failed:
            api.fail("Could not add key '%s'" % key)
        else:
            api.info("Repository key added: %s" % key)
def _get_metapkg_list(self):
    l = []
    for pkg in ["dcache"]:
        if isinstance(pkg, list):
            l.extend(pkg)
        else:
            l.append(pkg)
    api.info("Release candidate will install the following products: %s"
             % l)
    return l
def _request(self, *args, **kwargs):
    step_methods = []
    if "qc_step" in kwargs.keys():
        for step in utils.to_list(kwargs["qc_step"]):
            try:
                method = getattr(self, step.lower())
                step_methods.append(method)
            except AttributeError:
                api.info("Ignoring QC step '%s': not defined." % step)
                continue
    return f(self, step_methods, *args, **kwargs)
def remove_repo(self, repolist):
    """Remove all the appearances of a list of repositories.

    :repolist: list of repository names (ID between brackets)
    """
    for repo in repolist:
        r = runcmd("grep %s %s/* | cut -d':' -f1|uniq" % (repo, self.path))
        if r:
            for f in r.split('\n'):
                os.remove(f)
                api.info("Existing repository '%s' removed." % f)
def pre_install(self):
    repo_id = {
        "redhat6": "sl6",
        "redhat5": "sl5",
    }
    config.CFG["repository_url"] = [os.path.join(
        config.CFG["repository_url"][0],
        "repofiles/%s/" % repo_id[system.distro_version])]
    api.info("Changing repository URL to %s"
             % config.CFG["repository_url"][0])
    config.CFG["metapkg"] = self._get_metapkg_list()
def pre_config(self):
    ca_version = config.CFG["ca_version"]
    extra_vars = []
    if ca_version:
        extra_vars = [
            "ca_verification: true",
            "ca_version: %s" % self.format_version(ca_version)]
    else:
        api.info("Installing last available production version")
    if config.CFG["distribution"] == "umd":
        extra_vars.append("crl_deploy: true")
        config.CFG["qc_specific_id"].append("crl")
    self.cfgtool.extra_vars = extra_vars
def remove_repo(self, repolist):
    """Remove all the appearances of a list of repositories.

    :repolist: list of repository names.
    """
    install("software-properties-common")
    available_repos = self.get_repos()
    for repo in repolist:
        for available_repo in available_repos:
            if available_repo.find(repo) != -1:
                runcmd("apt-add-repository -r '%s'" % available_repo)
                api.info("Existing repository removed: %s"
                         % available_repo)
def _set_hiera(self):
    """Sets hiera configuration files in place."""
    api.info("Adding hiera parameter files: %s" % self.params_files)
    utils.render_jinja(
        "hiera.yaml",
        {
            "hiera_data_dir": self.hiera_data_dir,
            "params_files": self.params_files,
        },
        output_file=os.path.join("/etc/hiera.yaml"))
    shutil.copy("/etc/hiera.yaml", "/etc/puppet/hiera.yaml")
    if not os.path.exists("/etc/puppetlabs/code"):
        os.makedirs("/etc/puppetlabs/code")
    shutil.copy("/etc/hiera.yaml", "/etc/puppetlabs/code/hiera.yaml")
def check_input():
    """Performs a list of checks based on input parameters."""
    # 1) Type of installation
    if config.CFG["installation_type"]:
        api.info("Installation type: %s" % config.CFG["installation_type"])
    else:
        api.fail(("Need to provide the type of installation to be "
                  "performed: (install, upgrade)"),
                 do_abort=True)
    # 2) Verification repository URL
    if not config.CFG["repository_url"]:
        api.warn("No verification repository URL provided.")
    # 3) Metapackage
    if config.CFG["metapkg"]:
        msg = "Metapackage/s selected: %s" % ''.join([
            "\n\t+ %s" % mpkg for mpkg in config.CFG["metapkg"]])
        api.info(msg)
    print(u'\u2500' * 73)
def show_exec_banner_ascii():
    """Displays execution banner (ascii)."""
    cfg = config.CFG.copy()

    basic_repo = ["umd_release_pkg", "igtf_repo"]

    print(u'\n')
    print(green(u'UMD verification tool').center(120))
    print(u'=====================\n'.center(111))
    print((u'%s: %s' % (white(u'Quality criteria'),
                        blue(u'http://egi-qc.github.io'))).center(120))
    print((u'%s: %s' % (white(u'Codebase'),
                        blue(("https://github.com/egi-qc/umd-verification")))).center(120))
    print(u'')

    print(u'\t%s' % white(u'Path locations'))
    print(u'\t %s' % white('|'))
    for k in ["log_path", "yaim_path", "puppet_path"]:
        v = cfg.pop(k)
        leftjust = len(max(basic_repo, key=len)) + 5
        print(u'\t %s %s %s' % (white('|'), k.ljust(leftjust), v))
    print(u'\t')

    print(u'\t%s' % white(u'Production repositories'))
    print(u'\t %s' % white('|'))
    for repo in basic_repo:
        try:
            v = cfg.pop(repo)
        except KeyError:
            v = None
        leftjust = len(max(basic_repo, key=len)) + 5
        print(u'\t %s %s %s' % (white('|'), repo.ljust(leftjust), blue(v)))
    print(u'\n\n')

    if "repository_url" in cfg.keys():
        api.info("Using the following UMD verification repositories")
        repos = to_list(cfg.pop("repository_url"))
        for repo in repos:
            print(u'\t+ %s' % blue(repo))
    if "repository_file" in cfg.keys():
        api.info("Using the following repository files")
        repos = to_list(cfg.pop("repository_file"))
        for repo in repos:
            print(u'\t+ %s' % blue(repo))
def pre_config(self):
    info("PRE-config actions.")
    self.pkgtool.install(pkgs=["ntp"])
    info("<ntp> installed.")
    runcmd("mount -o remount,acl,user_xattr /")
    info("Enabled ACLs and Extended Attribute Support in /")
    info("END of PRE-config actions.")
def pre_config(self):
    api.info("PRE-config actions.")
    utils.install("ntp")
    api.info("<ntp> installed.")
    utils.runcmd("mount -o remount,acl,user_xattr /")
    api.info("Enabled ACLs and Extended Attribute Support in /")
    api.info("END of PRE-config actions.")
def pre_config(self):
    os_release = config.CFG["openstack_release"]
    self.cfgtool.module.append((
        "git://github.com/egi-qc/puppet-keystone.git",
        "umd_stable_%s" % os_release))

    keystone_voms_params = [
        "keystone_voms::openstack_version: %s" % os_release,
        "cacert: %s" % config.CFG["ca"].location
    ]
    keystone_voms_conf = os.path.join(
        config.CFG["cfgtool"].hiera_data_dir, "keystone_voms.yaml")
    if utils.to_yaml(keystone_voms_conf, keystone_voms_params):
        api.info(("keystone-voms hiera parameters "
                  "set: %s" % keystone_voms_conf))
    # Add it to hiera.yaml
    config.CFG["cfgtool"]._add_hiera_param_file("keystone_voms.yaml")

    pki.trust_ca(config.CFG["ca"].location)

    # Apache2
    if system.distro_version == "ubuntu16":
        utils.runcmd(("sed -e '/ServerName*/c\ServerName %s' "
                      "/etc/apache2/apache2.conf") % system.fqdn)
        utils.runcmd("/etc/init.d/apache2 restart")
    elif system.distro_version == "centos7":
        utils.runcmd(("sed -e '/ServerName*/c\ServerName %s' "
                      "/etc/httpd/conf/httpd.conf") % system.fqdn)
        utils.runcmd("systemctl restart httpd")

    # mysql - set current hostname
    utils.runcmd(("mysql -e 'UPDATE keystone.endpoint SET "
                  "url=\"https://%s:5000/v2.0\" WHERE url "
                  "like \"%%5000%%\";'" % system.fqdn))
    utils.runcmd(("mysql -e 'UPDATE keystone.endpoint SET "
                  "url=\"https://%s:35357/v2.0\" WHERE url "
                  "like \"%%35357%%\";'" % system.fqdn))

    # FIXME Create tenant VO:dteam
    utils.runcmd(("/bin/bash -c 'source /root/.nova/admin-novarc ; "
                  "openstack --os-password $OS_PASSWORD "
                  "--os-username $OS_USERNAME "
                  "--os-project-name $OS_PROJECT_NAME "
                  "--os-auth-url $OS_AUTH_URL "
                  "--os-cacert $OS_CACERT "
                  "project create --enable VO:dteam --or-show'"))
def pre_validate(self):
    info("PRE-validate actions.")
    self.pkgtool.install(pkgs=self.pre_validate_pkgs)
    info("<%s> installed." % ", ".join(self.pre_validate_pkgs))
    info("END of PRE-validate actions.")
def run(self, qc_specific_id, qc_envvars=None):
    if qc_specific_id:
        try:
            with open(QC_SPECIFIC_FILE) as f:
                d = yaml.load(f)
        except IOError:
            info("Could not load QC-specific config file: %s"
                 % QC_SPECIFIC_FILE)

        try:
            d[qc_specific_id]
        except KeyError:
            info("QC-specific ID '%s' definition not found "
                 "in configuration file '%s'"
                 % (qc_specific_id, QC_SPECIFIC_FILE))

        config = collections.defaultdict(dict)
        for k, v in d[qc_specific_id].items():
            config[k] = v

        if qc_envvars:
            self.qc_envvars = qc_envvars

        self.qc_func_1(config["qc_func_1"])
        self.qc_func_2(config["qc_func_2"])
    else:
        info(("No QC-specific ID provided: no specific QC probes "
              "will be run."))
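# Hedged illustration (not taken from the original sources): run() above
# expects QC_SPECIFIC_FILE to parse into a mapping keyed by the QC-specific
# ID, whose per-ID value carries the arguments handed to qc_func_1() and
# qc_func_2(). The ID and values below are placeholders, not real entries:
#
#     {"<qc_specific_id>": {"qc_func_1": <args for qc_func_1>,
#                           "qc_func_2": <args for qc_func_2>}}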
def pre_config(self):
    api.info("PRE-config actions.")
    utils.install("sudo")
    api.info("<sudo> package installed.")
    api.info("END of PRE-config actions.")
def pre_config(self):
    info("PRE-config actions.")
    self.pkgtool.install(pkgs="sudo")
    info("<sudo> package installed.")
    info("END of PRE-config actions.")
def pre_validate(self):
    api.info("PRE-validate actions.")
    utils.install(self.pre_validate_pkgs)
    api.info("<%s> installed." % ", ".join(self.pre_validate_pkgs))
    api.info("END of PRE-validate actions.")
def validate(self):
    # Strong validations first: (umd_release, repository_url)
    v_umd_release = self.get("umd_release", None)
    v_cmd_release = self.get("cmd_release", None)
    v_repo = self.get("repository_url", None)
    v_repo_file = self.get("repository_file", None)
    if not v_umd_release and not v_cmd_release:
        if self.get("cmd_one_release", None):
            v_cmd_release = self.get("cmd_one_release")
        else:
            api.fail(("No UMD or CMD release was selected: cannot start "
                      "deployment"),
                     stop_on_error=True)
    else:
        api.info("Using UMD %s release repository" % v_umd_release)
    if v_repo:
        api.info("Using UMD verification repository: %s" % v_repo)
    if v_repo_file:
        api.info("Using UMD verification repository file: %s" % v_repo_file)

    # Configuration management: Puppet
    from umd.base.configure.puppet import PuppetConfig
    if isinstance(self.__getitem__("cfgtool"), PuppetConfig):
        if not self.__getitem__("puppet_release"):
            api.fail(("No Puppet release package defined for '%s' "
                      "distribution" % system.distname),
                     stop_on_error=True)
def pre_config(self):
    api.info("PRE-config actions.")
    utils.install(["sudo", "gridengine", "gridengine-qmaster"])
    api.info(("<sudo>, <gridengine> and <gridengine-qmaster> packages "
              "installed."))
    api.info("END of PRE-config actions.")
def pre_config(self):
    info("PRE-config actions.")
    self.pkgtool.install(pkgs=["sudo", "gridengine", "gridengine-qmaster"])
    info(("<sudo>, <gridengine> and <gridengine-qmaster> packages "
          "installed."))
    info("END of PRE-config actions.")
def certify():
    """Create host certificate and private key."""
    cert_path = "/etc/grid-security/hostcert.pem"
    key_path = "/etc/grid-security/hostkey.pem"

    do_cert = True
    utils.runcmd("mkdir -p /etc/grid-security/certificates")
    utils.runcmd("chown root:root /etc/grid-security")
    utils.runcmd("chmod 0755 /etc/grid-security")

    if os.path.isfile(cert_path) and os.path.isfile(key_path):
        if not config.CFG.get("dont_ask_cert_renewal", False):
            r = prompt(("Certificate already exists under "
                        "'/etc/grid-security'. Do you want to "
                        "overwrite them? (y/N)"))
            if r.lower() == "y":
                api.info("Overwriting already existent certificate")
            else:
                do_cert = False
                api.info("Using already existent certificate")

    cert_for_subject = None
    if do_cert:
        hostcert = config.CFG.get("hostcert", None)
        hostkey = config.CFG.get("hostkey", None)
        if hostkey and hostcert:
            api.info("Using provided host certificates")
            utils.runcmd("cp %s %s" % (hostkey, key_path))
            utils.runcmd("chmod 400 %s" % key_path)
            utils.runcmd("cp %s %s" % (hostcert, cert_path))
            cert_for_subject = hostcert
        else:
            api.info("Generating own certificates")
            config.CFG["ca"] = OwnCA(
                domain_comp_country="es",
                domain_comp="UMDverification",
                common_name="UMDVerificationOwnCA")
            config.CFG["ca"].create(
                trusted_ca_dir="/etc/grid-security/certificates")
            config.CFG["cert"] = config.CFG["ca"].issue_cert(
                hash="2048",
                key_prv=key_path,
                key_pub=cert_path)
    else:
        cert_for_subject = cert_path

    if cert_for_subject:
        subject = get_subject(cert_for_subject)
        config.CFG["cert"] = OwnCACert(subject)
def trust_ca(ca_location):
    """Add the given CA to the system's CA trust database."""
    if system.distname == "ubuntu":
        trust_dir = "/usr/share/ca-certificates/"
        trust_cmd = "update-ca-certificates"
    elif system.distname == "centos":
        trust_dir = "/etc/pki/ca-trust/source/anchors/"
        trust_cmd = "update-ca-trust"
    ca_location_basename = os.path.basename(ca_location)
    ca_location_basename_crt = '.'.join([
        ca_location_basename.split('.')[0], "crt"])
    utils.runcmd("cp %s %s" % (
        ca_location,
        os.path.join(trust_dir, ca_location_basename_crt)))
    utils.runcmd("echo '%s' >> /etc/ca-certificates.conf"
                 % ca_location_basename_crt)
    r = utils.runcmd(trust_cmd)
    if r.failed:
        api.fail("Could not add CA '%s' to the system's trust DB"
                 % ca_location)
    else:
        api.info("CA '%s' added to system's trust DB" % ca_location)
def create_fake_proxy(vo="dteam", out="/tmp/umd_proxy"):
    """Creates a fake proxy for further testing.

    :vo: VO used for the proxy creation.
    :out: Output path to store the proxy being created.
    """
    fqdn = system.fqdn
    keypath = "/tmp/userkey.crt"
    certpath = "/tmp/usercert.crt"
    config.CFG["ca"].issue_cert(hostname="perico-palotes",
                                hash="2048",
                                key_prv=keypath,
                                key_pub=certpath)
    utils.runcmd(("voms-proxy-fake -rfc -cert %s -key %s "
                  "-hours 44000 -voms %s -hostcert "
                  "/etc/grid-security/hostcert.pem -hostkey "
                  "/etc/grid-security/hostkey.pem "
                  "-fqan /%s/Role=NULL/Capability=NULL "
                  "-uri %s:15000 -out %s")
                 % (certpath, keypath, vo, vo, fqdn, out))
    api.info("Fake proxy created under '%s'" % out)
    return out
def pre_install(self):
    api.info("PRE-install actions.")
    try:
        pwd.getpwnam("storm")
    except KeyError:
        utils.runcmd("/usr/sbin/adduser -M storm")
        api.info("users storm and gridhttps added")
    api.info("END of PRE-install actions.")
def pre_install(self):
    info("PRE-install actions.")
    try:
        pwd.getpwnam("storm")
    except KeyError:
        runcmd("/usr/sbin/adduser -M storm")
        info("users storm and gridhttps added")
    info("END of PRE-install actions.")
def run(self, **kwargs):
    """Runs UMD installation."""
    self._check()

    # Handle installation type
    installation_type = config.CFG["installation_type"]
    if installation_type == "update":
        qc_step = butils.QCStep("QC_UPGRADE_1", "Upgrade", "qc_upgrade_1")
    elif installation_type == "install":
        qc_step = butils.QCStep("QC_DIST_1",
                                "Binary Distribution",
                                "qc_inst_1")

    repo_config = True
    if "ignore_repo_config" in kwargs.keys():
        repo_config = False

    if repo_config:
        # Distribution-based settings
        repopath = self.pkgtool.client.path
        msg_purge = "UMD"
        paths_to_purge = ["%s/UMD-*" % repopath]
        pkgs_to_purge = ["umd-release*"]
        pkgs_to_download = [("UMD", config.CFG["umd_release"])]
        pkgs_additional = []
        if system.distname == "redhat":
            msg_purge = " ".join(["EPEL and/or", msg_purge])
            paths_to_purge.insert(0, "%s/epel-*" % repopath)
            pkgs_to_purge.insert(0, "epel-release*")
            pkgs_to_download.insert(0, ("EPEL", config.CFG["epel_release"]))
            pkgs_additional.append("yum-priorities")

        # Installation/upgrade workflow
        r = qc_step.runcmd(self.pkgtool.remove(pkgs_to_purge),
                           stop_on_error=False)
        if r.failed:
            api.info("Could not delete %s release packages." % msg_purge)

        if qc_step.runcmd("/bin/rm -f %s" % " ".join(paths_to_purge)):
            api.info("Purged any previous %s repository file." % msg_purge)

        for pkg in pkgs_to_download:
            pkg_id, pkg_url = pkg
            if pkg_url:
                pkg_base = os.path.basename(pkg_url)
                pkg_loc = os.path.join("/tmp", pkg_base)
                if qc_step.runcmd("wget %s -O %s" % (pkg_url, pkg_loc)):
                    api.info("%s release package fetched from %s."
                             % (pkg_id, pkg_url))
                r = qc_step.runcmd(self.pkgtool.install(pkg_loc))
                if r.failed:
                    qc_step.print_result("FAIL",
                                         ("Error while installing %s "
                                          "release.") % pkg_id)
                else:
                    api.info("%s release package installed." % pkg_id)

        for pkg in pkgs_additional:
            r = qc_step.runcmd(self.pkgtool.install(pkg))
            if r.failed:
                api.info("Error while installing '%s'." % pkg)
            else:
                api.info("'%s' requirement installed." % pkg)

    if config.CFG["dryrun"]:
        api.info(("Installation or upgrade process will be simulated "
                  "(dryrun: ON)"))
        self.pkgtool.dryrun = True

    if installation_type == "update":
        if config.CFG["repository_url"]:
            # 1) Install base (production) version
            r = qc_step.runcmd(self.pkgtool.install(self.metapkg))
            if not r.failed:
                api.info("UMD product/s '%s' production version installed."
                         % self.metapkg)
        # 2) Enable verification repository
        for url in config.CFG["repository_url"]:
            self._enable_verification_repo(qc_step, url)
        # 3) Refresh
        qc_step.runcmd(self.pkgtool.refresh())
        # 4) Update
        api.info("Using repositories: %s" % self.pkgtool.get_repos())
        r = qc_step.runcmd(self.pkgtool.update(),
                           fail_check=False,
                           stop_on_error=False,
                           get_error_msg=True)
        d = self.pkgtool.get_pkglist(r)
    elif installation_type == "install":
        # 1) Enable verification repository
        for url in config.CFG["repository_url"]:
            self._enable_verification_repo(qc_step, url)
        # 2) Refresh
        qc_step.runcmd(self.pkgtool.refresh())
        # 3) Install verification version
        api.info("Using repositories: %s" % self.pkgtool.get_repos())
        r = qc_step.runcmd(self.pkgtool.install(self.metapkg),
                           fail_check=False,
                           stop_on_error=False,
                           get_error_msg=True)
        d = self.pkgtool.get_pkglist(r)
        # NOTE(orviz): missing WARNING case
    else:
        raise exception.InstallException(("Installation type '%s' "
                                          "not implemented."
                                          % installation_type))

    is_ok = True
    # r.stderr
    if r.failed:
        # FIXME (should be within YUM class) YUM's downloadonly
        # plugin returns 1 on success
        if r.stderr.find("--downloadonly specified") != -1:
            is_ok = True
            msgtext = "Dry-run installation ended successfully."
        else:
            is_ok = False
            msgtext = r.msgerror
    else:
        msgtext = "Installation ended successfully."

    if is_ok:
        if self.metapkg:
            for pkg in self.metapkg:
                try:
                    api.info("Package '%s' installed version: %s."
                             % (pkg, d[pkg]))
                except KeyError:
                    api.fail("Package '%s' could not be installed." % pkg)
                    is_ok = False
                    msgtext = "Not all the packages could be installed."
        else:
            api.info("List of packages updated: %s"
                     % self.pkgtool.get_pkglist(r))

    if is_ok:
        qc_step.print_result("OK", msgtext)
    else:
        qc_step.print_result("FAIL", msgtext, do_abort=True)
def run(self, installation_type, epel_release_url, umd_release_url,
        repository_url=None, **kwargs):
    """Runs UMD installation.

    Arguments::
        installation_type: install from scratch ('install') or
                           update ('update').
        epel_release_url: EPEL release (URL).
        umd_release_url: UMD release (URL).
        repository_url: base repository URL (with the verification stuff).
    """
    if installation_type == "update":
        qc_step = QCStep("QC_UPGRADE_1", "Upgrade", "/tmp/qc_upgrade_1")
    elif installation_type == "install":
        qc_step = QCStep("QC_INST_1",
                         "Binary Distribution",
                         "/tmp/qc_inst_1")

    r = self.pkgtool.remove(pkgs=["epel-release*", "umd-release*"])
    if r.failed:
        info("Could not delete [epel/umd]-release packages.")

    if qc_step.runcmd(("/bin/rm -f /etc/yum.repos.d/UMD-* "
                       "/etc/yum.repos.d/epel-*")):
        info("Purged any previous EPEL or UMD repository file.")

    for pkg in (("EPEL", epel_release_url), ("UMD", umd_release_url)):
        pkg_id, pkg_url = pkg
        pkg_base = os.path.basename(pkg_url)
        pkg_loc = os.path.join("/tmp", pkg_base)
        if qc_step.runcmd("wget %s -O %s" % (pkg_url, pkg_loc)):
            info("%s release RPM fetched from %s." % (pkg_id, pkg_url))

        r = self.pkgtool.install(pkgs=[pkg_loc])
        if r.failed:
            qc_step.print_result(
                "FAIL",
                "Error while installing %s release." % pkg_id)
        else:
            info("%s release package installed." % pkg_id)

    r = self.pkgtool.install(pkgs=["yum-priorities"])
    if r.failed:
        info("Error while installing 'yum-priorities'.")
    else:
        info("'yum-priorities' (UMD) requirement installed.")

    if installation_type == "update":
        # 1) Install base (production) version
        r = self.pkgtool.install(pkgs=[self.metapkg])
        if r.failed:
            qc_step.print_result("FAIL",
                                 "Error while installing '%s' packages"
                                 % self.metapkg,
                                 do_abort=True)
        else:
            info("UMD product/s '%s' installation finished."
                 % self.metapkg)

        # 2) Enable verification repository
        if repository_url:
            info("Verification repository provided.")
            self._enable_verification_repo(qc_step, repository_url)

        # 3) Update
        r = self.pkgtool.update()
        if r.failed:
            qc_step.print_result("FAIL",
                                 ("Error updating from verification "
                                  "repository."),
                                 do_abort=True)
        else:
            qc_step.print_result("OK", msg="System successfully updated.")
    elif installation_type == "install":
        # 1) Enable verification repository
        if repository_url:
            info("Verification repository provided.")
            self._enable_verification_repo(qc_step, repository_url)

        # 2) Install verification version
        r = self.pkgtool.install(self.metapkg)
        # NOTE(orviz): missing WARNING case
        if r.failed:
            qc_step.print_result("FAIL",
                                 ("There was a failure installing "
                                  "metapackage '%s'." % self.metapkg),
                                 do_abort=True)
        else:
            qc_step.print_result("OK",
                                 ("Metapackage '%s' installed "
                                  "successfully." % self.metapkg))
    else:
        raise exception.InstallException(("Installation type '%s' "
                                          "not implemented."
                                          % installation_type))