Example #1
def get_openidc_auth():
    """
    use ODCS for creating composes as URL parameter
    It enables this feature in case MTF_ODCS envvar is set
    MTF_ODCS=yes -- use openidc and token for your user
    MTF_ODCS=OIDC_token_string -- use this token for authentication

    :envvar MTF_ODCS: yes or token
    :return:
    """
    odcstoken = get_odcs_envvar()

    # MTF_ODCS=yes: no literal token was given, so ask for one via OpenIDC (may open a web browser)
    if is_true(odcstoken):
        if conf.get("openidc").get("token"):
            # prefer a token defined in the config file, if any
            return conf["openidc"]["token"]
        # imported here to avoid a hard dependency on openidc-client
        # (needed only when ODCS is used without a predefined token)
        import openidc_client
        # Get the auth token using the OpenID client.
        oidc = openidc_client.OpenIDCClient(*conf["openidc"]["auth"])
        scopes = conf["openidc"]["scopes"]
        try:
            odcstoken = oidc.get_token(scopes, new_token=True)
        except requests.exceptions.HTTPError as e:
            core.print_info(e.response.text)
            raise mtfexceptions.ModuleFrameworkException(
                "Unable to get token via OpenIDC for your user")
    if odcstoken and len(odcstoken) < 10:
        raise mtfexceptions.ModuleFrameworkException(
            "Unable to parse token for ODCS, token is too short: %s" %
            odcstoken)
    return odcstoken
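A minimal usage sketch follows, assuming the MTF common module is importable (as it is, for instance, in Example #26); the token values are made up:

# Hedged usage sketch -- the environment values below are illustrative, not real tokens.
import os
import common

os.environ["MTF_ODCS"] = "yes"                 # ask OpenIDC for a token for the current user
token = common.get_openidc_auth()              # may open a web browser if no token is configured

os.environ["MTF_ODCS"] = "abcdef0123456789"    # hypothetical literal token string
token = common.get_openidc_auth()              # returned as-is after the length sanity check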
Example #2
 def check_tests(self):
     summary_line = "TEST TYPES SUMMARY"
     prefix_line = "Type"
     output = subprocess.check_output([self.AVOCADO, "list", "-V", "--"] +
                                      self.tests)
     assert summary_line in output
     badstates = [
         "NOT_A_TEST", "MISSING", "ACCESS_DENIED", "BROKEN_SYMLINK"
     ]
     badtests = []
     # skip the header line and stop at the trailing "TEST TYPES SUMMARY" section
     for line in output.split("\n"):
         testline = line.strip()
         if testline.startswith(prefix_line) or not testline:
             continue
         elif testline.startswith(summary_line):
             break
         else:
             splitted = testline.split(" ", 1)
             if splitted[0] in badstates:
                 badtests.append(splitted[1].strip())
     if badtests:
         core.print_info("", "ERROR: There are bad tests:", "-------------")
         core.print_info(*badtests)
         exit(19)
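A self-contained sketch of the same parsing logic, run on made-up output (the sample lines only illustrate the shape the code expects; they are not verbatim avocado output):

sample = "\n".join([
    "Type         Test",
    "INSTRUMENTED /tests/test_ok.py",
    "NOT_A_TEST   /tests/helper.py",
    "",
    "TEST TYPES SUMMARY",
    "...",
])
bad = []
for line in sample.split("\n"):
    line = line.strip()
    if line.startswith("Type") or not line:
        continue                      # skip the header and empty lines
    if line.startswith("TEST TYPES SUMMARY"):
        break                         # everything after this is the summary
    state, name = line.split(" ", 1)
    if state in ["NOT_A_TEST", "MISSING", "ACCESS_DENIED", "BROKEN_SYMLINK"]:
        bad.append(name.strip())
assert bad == ["/tests/helper.py"]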
Example #3
    def getPackageList(self, profile=None):
        """
        Return list of packages what has to be installed inside module

        :param profile: get list for intended profile instead of default method for searching
        :return: list of packages (rpms)
        """
        package_list = []
        mddata = self.getModulemdYamlconfig()
        if not profile:
            if 'packages' in self.config:
                packages_rpm = self.config.get('packages', {}).get('rpms', [])
                packages_profiles = []
                for profile_in_conf in self.config.get('packages',
                                                       {}).get('profiles', []):
                    packages_profiles += mddata['data']['profiles'][
                        profile_in_conf]['rpms']
                package_list += packages_rpm + packages_profiles
            if get_if_install_default_profile():
                profile_append = mddata.get('data', {})\
                    .get('profiles', {}).get(get_profile(), {}).get('rpms', [])
                package_list += profile_append
        else:
            package_list += mddata['data']['profiles'][profile].get('rpms', [])
        core.print_info("PCKGs to install inside module:", package_list)
        return package_list
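For reference, a sketch of the configuration shape getPackageList() reads from self.config; the package and profile names are made up:

# Hypothetical shape of self.config consumed above (names are illustrative only):
config = {
    "packages": {
        "rpms": ["nodejs", "npm"],    # installed as-is
        "profiles": ["default"],      # each entry expanded via mddata['data']['profiles'][name]['rpms']
    }
}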
Example #4
    def get_repo(self):
        # import is done here to avoid import-time messages when ODCS is not used
        from odcs.client.odcs import ODCS, AuthMech

        if self.odcsauth.get("auth_mech") == AuthMech.OpenIDC:
            if not self.odcsauth.get("openidc_token"):
                self.odcsauth["openidc_token"] = common.get_openidc_auth()
        odcs = ODCS(common.conf["odcs"]["url"], **self.odcsauth)
        core.print_debug(
            "ODCS Starting module composing: %s" % odcs, "%s compose for: %s" %
            (self.compose_type, self.get_module_identifier()))
        compose_builder = odcs.new_compose(
            self.get_module_identifier(), self.compose_type,
            **common.conf["odcs"]["new_compose_dict"])
        timeout_time = common.conf["odcs"]["timeout"]
        core.print_debug("ODCS Module compose started, timeout set to %ss" %
                         timeout_time)
        compose_state = odcs.wait_for_compose(compose_builder["id"],
                                              timeout=timeout_time)
        core.print_debug(
            "ODCS compose debug info for: %s" % self.get_module_identifier(),
            compose_state)
        if compose_state["state_name"] == "done":
            compose = "{compose}/{arch}/os".format(
                compose=compose_state["result_repo"],
                arch=common.conf["generic"]["arch"])
            core.print_info("ODCS Compose done, URL with repo file", compose)
            return compose
        else:
            raise mtfexceptions.PDCExc(
                "ODCS: Failed to generate compose for module: %s" %
                self.get_module_identifier())
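A hedged usage sketch: PDCParserODCS (see Example #9) drives this method; the module name, stream and resulting URL below are illustrative only:

# Hypothetical usage; the returned URL has the shape "<result_repo>/<arch>/os".
parser = PDCParserODCS("nodejs", "8")
repo = parser.get_repo()    # e.g. ".../odcs-12345/compose/Temporary/x86_64/os" once the compose is "done"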
Example #5
 def check_copy_files_exist(self):
     """
     Function checks if COPY instructions contain files which really exist
     :return: True if all files/directories exist
              False otherwise
     """
     dir_name = os.getcwd()
     files = self._get_copy_add_files(os.path.dirname(self.dockerfile))
     f_exists = False
     missing_files = False
     if not files:
         f_exists = True
     else:
         for f in files:
             if f.startswith('http'):
                 f_exists = True
                 continue
             if os.path.exists(os.path.join(dir_name, f)):
                 f_exists = True
             else:
                 core.print_info("The file %s does not exist." % f)
                 missing_files = True
     if missing_files:
         return False
     else:
         return f_exists
Example #6
    def _get_structure_as_dict(self):
        functions = {ENV: self._get_env,
                     EXPOSE: self._get_general,
                     VOLUME: self._get_volume,
                     LABEL: self._get_label,
                     FROM: self._get_general,
                     RUN: self._get_general,
                     USER: self._get_general,
                     COPY: self._get_general,
                     ADD: self._get_general,
                     }

        self.docker_dict[LABEL] = {}
        for label in self.dfp.labels:
            self.docker_dict[LABEL][label] = self.dfp.labels[label]
        for struct in self.dfp.structure:
            key = struct[INSTRUCT]
            val = struct["value"]
            if key != LABEL:
                if key not in self.docker_dict:
                    self.docker_dict[key] = []
                try:
                    ret_val = functions[key](val)
                    for v in ret_val:
                        self.docker_dict[key].append(v)
                except KeyError:
                    core.print_info("Dockerfile tag %s is not parsed by MTF" % key)
Example #7
def getBasePackageSet(modulesDict=None, isModule=True, isContainer=False):
    """
    Get list of base packages (for bootstrapping of various module types)
    It is used internally, you should not use it in case you don't know where to use it.

    :param modulesDict: dictionary of dependent modules
    :param isModule: bool is module
    :param isContainer: bool is contaner?
    :return: list of packages to install
    """
    # nspawn container need to install also systemd to be able to boot
    out = []

    if isModule:
        if isContainer:
            out = conf["packages"]["container"]
        else:
            out = conf["packages"]["common"] + conf["packages"]["basic_modular"]
    else:
        if isContainer:
            out = []
        else:
            out = conf["packages"]["common"] + conf["packages"]["basic"]
    core.print_info("Base packages to install:", out)
    return out
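A short summary sketch of the selection above; the config keys are exactly the ones referenced in the code, and the call itself is just an illustration:

# isModule=True,  isContainer=True   -> conf["packages"]["container"]
# isModule=True,  isContainer=False  -> conf["packages"]["common"] + conf["packages"]["basic_modular"]
# isModule=False, isContainer=True   -> []
# isModule=False, isContainer=False  -> conf["packages"]["common"] + conf["packages"]["basic"]
base_packages = getBasePackageSet(isModule=True, isContainer=False)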
Example #8
 def get_tag(self, name):
     name = '# %s' % name
     tag_found = True
     if not self.help_md:
         core.print_info("help md does not exist.")
         return False
     if not [x for x in self.help_md if name.upper() in x]:
         tag_found = False
     return tag_found
Example #9
def test_PDC_ODCS_nodejs():
    core.print_info(sys._getframe().f_code.co_name)
    parser = PDCParserODCS("nodejs", "8")
    # TODO: the MTF_ODCS variable needs to be set to an ODCS token, and ODCS must be at least version 0.1.2,
    # otherwise your user will be asked for a token interactively
    if common.get_odcs_envvar():
        core.print_info(parser.get_repo())


#test_PDC_ODCS_nodejs()
Example #10
    def tearDown(self):
        """
        Clean up the environment and call cleanup from config

        :return: None
        """
        if get_if_do_cleanup():
            self.stop()
            self._callCleanupFromConfig()
        else:
            core.print_info("TearDown phase skipped.")
Example #11
 def __init__(self):
     config = {}
     for cfgdir in [self.source_cfg, self.default_cfg, self.user_cfg]:
         core.print_debug("MTF config dir search: {}".format(cfgdir))
         if os.path.exists(cfgdir):
             core.print_info(
                 "MTF config dir exists, search for {}/{}".format(
                     cfgdir, self.pattern))
             for cfgfile in glob.glob(os.path.join(cfgdir, self.pattern)):
                 core.print_info("MTF config load: {}".format(cfgfile))
                 with open(cfgfile) as f:
                     config.update(yaml.safe_load(f))
     assert config.get("generic")
     super(MTFConfParser, self).__init__(config)
Example #12
    def templateTest(self, testname, testlines, method="run"):
        """
        Defines multiline Bash snippet tests part
        of the ``tests/generated.py`` file.
        """
        self.output = self.output + """
    def test_%s(self):
        self.start()
""" % testname
        for line in testlines:
            # only use shell=True for runHost() calls, otherwise variables etc.
            # get expanded too early, i.e. on the host
            self.output = self.output + \
                '        self.%s(""" %s """,  shell=%r)\n' % (
                    method, line, method == "runHost")
        core.print_info("Added test (runmethod: %s): %s" % (method, testname))
Example #13
 def _get_copy_add_files(self, dirname):
     """
     Function gets all COPY and ADD files from Dockerfile into list.
     It contains only source files not target files
     :param dirname: Dirname where we look for COPY and ADD files.
     :return: list
     """
     files = []
     for instruction in [COPY, ADD]:
         try:
             # Get only source files, not the target
             for x in self.docker_dict[instruction]:
                 if not x.startswith('/'):
                     files.extend(glob.glob(os.path.join(dirname, x)))
         except KeyError:
             core.print_info("Instruction %s is not present in Dockerfile" % instruction)
     return files
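A rough illustration with a hypothetical Dockerfile of which sources end up in the returned list (see also check_copy_files_exist in Example #5):

# COPY files/config.ini /etc/app/config.ini   -> glob("<dirname>/files/config.ini") is collected (if it exists)
# COPY conf/*.cfg /etc/app/                   -> every matching file under <dirname>/conf/ is collected
# COPY /absolute/path /dest                   -> the source is skipped, because it starts with '/'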
Example #14
    def __init__(self, args, unknown):
        # choose between TESTS and ADDITIONAL ENVIRONMENT from options
        if args.linter:
            self.tests += glob.glob("{MTF_TOOLS}/{GENERIC_TEST}/*.py".format(
                MTF_TOOLS=metadata_common.MetadataLoaderMTF.MTF_LINTER_PATH,
                GENERIC_TEST=common.conf["generic"]["generic_tests"]))
            self.tests += glob.glob("{MTF_TOOLS}/{STATIC_LINTERS}/*.py".format(
                MTF_TOOLS=metadata_common.MetadataLoaderMTF.MTF_LINTER_PATH,
                STATIC_LINTERS=common.conf["generic"]["static_tests"]))
        self.args = args

        # parse unknown options and try to find which parameters are tests
        while unknown:
            if unknown[0] in self.A_KNOWN_PARAMS_SIMPLE:
                self.additionalAvocadoArg.append(unknown[0])
                unknown = unknown[1:]
            elif unknown[0].startswith("-"):
                if "=" in unknown[0] or len(unknown) < 2:
                    self.additionalAvocadoArg.append(unknown[0])
                    unknown = unknown[1:]
                else:
                    self.additionalAvocadoArg += unknown[0:2]
                    unknown = unknown[2:]
            elif glob.glob(unknown[0]):
                # dereference filename via globs
                testlist = glob.glob(unknown[0])
                self.tests += testlist
                unknown = unknown[1:]
            else:
                self.tests.append(unknown[0])
                unknown = unknown[1:]

        if self.args.metadata:
            core.print_info("Using Metadata loader for tests and filtering")
            metadata_tests = filtertests(backend="mtf",
                                         location=os.getcwd(),
                                         linters=False,
                                         tests=[],
                                         tags=[],
                                         relevancy="")
            tests_dict = [x[metadata_common.SOURCE] for x in metadata_tests]
            self.tests += tests_dict
            core.print_debug("Loaded tests via metadata file: %s" % tests_dict)
        core.print_debug("tests = {0}".format(self.tests))
        core.print_debug("additionalAvocadoArg = {0}".format(
            self.additionalAvocadoArg))
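A worked example of the option splitting above; the leftover argv items are made up, and it assumes "--xunit" is not in A_KNOWN_PARAMS_SIMPLE and that the glob matches files on disk:

# unknown = ["--xunit", "out.xml", "tests/test_*.py", "sanity"]
# results in roughly:
#   additionalAvocadoArg == ["--xunit", "out.xml"]        # a flag without "=" consumes its value too
#   tests += glob.glob("tests/test_*.py") + ["sanity"]    # globs are dereferenced, the rest kept as test names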
Example #15
    def download_tagged(self, dirname):
        """
        Downloads packages to directory, based on koji tags
        It downloads just ARCH and noarch packages

        :param dirname: string
        :return: None
        """
        core.print_info("DOWNLOADING ALL packages for %s_%s_%s" %
                        (self.name, self.stream, self.version))
        for foo in process.run("koji list-tagged --quiet %s" %
                               self.get_pdc_info()["koji_tag"],
                               verbose=core.is_debug()).stdout.split("\n"):
            pkgbouid = foo.strip().split(" ")[0]
            if len(pkgbouid) > 4:
                core.print_debug("DOWNLOADING: %s" % foo)

                @timeoutlib.Retry(
                    attempts=common.conf["generic"]["retrycount"] * 10,
                    timeout=common.conf["generic"]["retrytimeout"] * 60,
                    delay=common.conf["generic"]["retrytimeout"],
                    error=mtfexceptions.KojiExc(
                        "RETRY: Unbale to fetch package from koji after %d attempts"
                        % (common.conf["generic"]["retrycount"] * 10)))
                def tmpfunc():
                    a = process.run(
                        "cd %s; koji download-build %s  -a %s -a noarch" %
                        (dirname, pkgbouid, common.conf["generic"]["arch"]),
                        shell=True,
                        verbose=core.is_debug(),
                        ignore_status=True)
                    if a.exit_status == 1:
                        if "packages available for" in a.stdout.strip():
                            core.print_debug(
                                'UNABLE TO DOWNLOAD package (intended for other architectures, GOOD):',
                                a.command)
                        else:
                            raise mtfexceptions.KojiExc(
                                'UNABLE TO DOWNLOAD package (KOJI issue, BAD):',
                                a.command)

                tmpfunc()
        core.print_info("DOWNLOADING finished")
Example #16
def test_PDC_general_base_runtime():
    core.print_info(sys._getframe().f_code.co_name)
    parser = PDCParserGeneral("base-runtime", "master")
    assert not parser.generateDepModules()
    assert "module-" in parser.get_pdc_info()["koji_tag"]
    core.print_info(parser.get_repo())
    assert common.conf["compose"]["baseurlrepo"][:30] in parser.get_repo()
    core.print_info(parser.generateParams())
    assert len(parser.generateParams()) == 3
    assert "MODULE=nspawn" in " ".join(parser.generateParams())
    core.print_info("URL=%s" % common.conf["compose"]["baseurlrepo"][:30])
    assert "URL=%s" % common.conf["compose"]["baseurlrepo"][:30] in " ".join(
        parser.generateParams())
Example #17
    def installTestDependencies(self, packages=None):
        """
        Install packages on a host machine to prepare a test environment.

        :param (list): packages to install. If not specified, rpms from config.yaml
                       will be installed.
        :return: None
        """
        if not packages:
            packages = self.get_test_dependencies()

        if packages:
            core.print_info("Installs test dependencies: ", packages)
            # you have to have root permission to install packages:
            try:
                self.runHost("{HOSTPACKAGER} install " + " ".join(packages),
                             ignore_status=False,
                             verbose=core.is_debug())
            except avocado.utils.process.CmdError as e:
                raise mtfexceptions.CmdExc(
                    "Installation failed; Do you have permission to do that?",
                    e)
Example #18
    def install_packages(self, packages=None):
        """
        Install packages in config (by config or via parameter)

        :param packages:
        :return:
        """
        if not packages:
            packages = self.getPackageList()
        if packages:
            a = self.run("%s install %s" %
                         (self.get_packager(), " ".join(packages)),
                         ignore_status=True,
                         verbose=False)
            if a.exit_status == 0:
                core.print_info(
                    "Packages installed via %s" % self.get_packager(),
                    a.stdout)
            else:
                core.print_info(
                    "Nothing installed via %s, but package list is not empty" %
                    self.get_packager(), packages)
                raise mtfexceptions.CmdExc(
                    "ERROR: Unable to install packages inside: %s" % packages)
Example #19
def test_PDC_koji_nodejs():
    common.conf["nspawn"]["basedir"] = "."
    core.print_info(sys._getframe().f_code.co_name)
    parser = PDCParserKoji("nodejs", "8")
    deps = parser.generateDepModules()
    core.print_info(deps)
    assert 'platform' in deps
    core.print_info(parser.get_repo())
    assert "file://" in parser.get_repo()
    assert os.path.abspath(
        common.conf["nspawn"]["basedir"]) in parser.get_repo()
    assert "MODULE=nspawn" in " ".join(parser.generateParams())
    assert "URL=file://" in " ".join(parser.generateParams())
Example #20
def test_PDC_general_nodejs():
    core.print_info(sys._getframe().f_code.co_name)
    parser = PDCParserGeneral("nodejs", "8")
    deps = parser.generateDepModules()
    core.print_info(deps)
    assert 'platform' in deps
Example #21
def mtfenvclean():
    # cleanup_env has a separate implementation for each backend: EnvDocker/EnvRpm/EnvNspawn
    env.cleanup_env()
    core.print_info("All clean")
Example #22
def mtfenvset():
    core.print_info("Preparing environment ...")
    # prepare_env has a separate implementation for each backend: EnvDocker/EnvRpm/EnvNspawn
    env.prepare_env()
Example #23
    def __init__(self, args, unknown):
        # choose between TESTS and ADDITIONAL ENVIRONMENT from options
        if args.linter:
            self.tests += glob.glob("{MTF_TOOLS}/{GENERIC_TEST}/*.py".format(
                MTF_TOOLS=metadata_common.MetadataLoaderMTF.MTF_LINTER_PATH,
                GENERIC_TEST=common.conf["generic"]["generic_tests"]))
            self.tests += glob.glob("{MTF_TOOLS}/{STATIC_LINTERS}/*.py".format(
                MTF_TOOLS=metadata_common.MetadataLoaderMTF.MTF_LINTER_PATH,
                STATIC_LINTERS=common.conf["generic"]["static_tests"]))
        self.args = args

        # parse unknown options and try to find which parameters are tests
        while unknown:
            if unknown[0] in self.A_KNOWN_PARAMS_SIMPLE:
                self.additionalAvocadoArg.append(unknown[0])
                unknown = unknown[1:]
            elif unknown[0].startswith("-"):
                if "=" in unknown[0] or len(unknown) < 2:
                    self.additionalAvocadoArg.append(unknown[0])
                    unknown = unknown[1:]
                else:
                    self.additionalAvocadoArg += unknown[0:2]
                    unknown = unknown[2:]
            elif glob.glob(unknown[0]):
                # dereference filename via globs
                testlist = glob.glob(unknown[0])
                self.tests += testlist
                unknown = unknown[1:]
            else:
                self.tests.append(unknown[0])
                unknown = unknown[1:]

        if self.args.metadata:
            core.print_info("Using Metadata loader for tests and filtering")
            metadata_tests = filtertests(backend="mtf",
                                         location=os.getcwd(),
                                         linters=False,
                                         tests=[],
                                         tags=[],
                                         relevancy="")
            tests_dict = [x[metadata_common.SOURCE] for x in metadata_tests]
            self.tests += tests_dict
            core.print_debug("Loaded tests via metadata file: %s" % tests_dict)
        core.print_debug("tests = {0}".format(self.tests))
        core.print_debug("additionalAvocadoArg = {0}".format(
            self.additionalAvocadoArg))

        # Advanced filtering and testcases adding based on FMF metadata, see help
        if self.args.fmffilter or self.args.fmfpath:
            # if fmf path is empty, use actual directory
            if not self.args.fmfpath:
                self.args.fmfpath = ["."]
            try:
                import fmf
            except ImportError:
                raise mtfexceptions.ModuleFrameworkException(
                    "FMF metadata format not installed on your system,"
                    " see fmf.readthedocs.io/en/latest/"
                    " for more info (how to install)")
            core.print_debug(
                "Using FMF metadata: path - {} and filters - {}".format(
                    self.args.fmfpath,
                    common.conf["fmf"]["filters"] + self.args.fmffilter))
            for onepath in self.args.fmfpath:
                tree = fmf.Tree(onepath)
                for node in tree.prune(
                        False, common.conf["fmf"]["keys"],
                        common.conf["fmf"]["names"],
                        common.conf["fmf"]["filters"] + self.args.fmffilter):
                    testcase = node.show(False, common.conf["fmf"]["format"],
                                         common.conf["fmf"]["values"])
                    core.print_debug("added test by FMF: {}".format(testcase))
                    self.tests.append(testcase)
Example #24
 def printIfVerbose(*sargs):
     if options.verbose:
         # forward the collected arguments individually, matching core.print_info(*args) usage elsewhere
         core.print_info(*sargs)
Example #25
 def _tcinfo(self, testcases, header, logs=True, description=True):
     """
     Parse testcases dics and print output for them in nicer format
     Main purpose is to display docstrings of testcases for failures
     example:
         def test_some(something)
             '''
             This is line1.
             This is line2.
             :return: None
             '''
             self.assertTrue(False, msg="This is fail reason")
     procudes line:    desc -> This is line1. This is line2.
                       reason -> This is fail reason
     :param testcases: dict of testcases
     :param header: str what to print as header
     :param logs: boolean if print logs for these testcases (default True)
     :param description: boolean if print description = docs strings for test class + function
     :return: None
     """
     if testcases:
         emptydelimiter = ""
         harddelimiter = "------------------------"
         core.print_info(emptydelimiter,
                         "{0} {1} {0}".format(harddelimiter, header))
         for testcase in testcases:
             tcname = testcase
             if re.search('^[0-9]+-', testcase.get('id', "")):
                 tcname = testcase.get('id').split("-", 1)[1]
             tcnameoutput = tcname
             splitted = re.search("(.*):(.+)\.(.+)$", tcname)
             if splitted:
                 docstrcls = []
                 docstrtst = []
                 testfile, classname, fnname = splitted.groups()
                 try:
                     testmodule = imp.load_source("test", testfile)
                     if getattr(testmodule, classname).__doc__:
                         docstrcls = getattr(
                             testmodule,
                             classname).__doc__.strip().split("\n")
                     if getattr(getattr(testmodule, classname),
                                fnname).__doc__:
                         docstrtst = getattr(
                             getattr(testmodule, classname),
                             fnname).__doc__.strip().split("\n")
                     tcnameoutput = " ".join([
                         x for x in docstrcls + docstrtst
                         if not re.search(":.*:", x)
                     ])
                     # TODO: collapse runs of whitespace into a single space - we should find a better solution for this
                     tcnameoutput = ' '.join(tcnameoutput.split())
                 except Exception as e:
                     core.print_debug(
                         "(INFO) Error happened when parsing testcase name ({})"
                         .format(tcname), e)
             core.print_info("TEST {0}:  {1}".format(
                 testcase.get('status'), tcname))
             if description and tcnameoutput != tcname and tcnameoutput and tcnameoutput.strip(
             ):
                 core.print_info("     desc -> {0}".format(tcnameoutput))
             if testcase.get('fail_reason'
                             ) and testcase.get('fail_reason') != "None":
                 core.print_info("     reason -> {0}".format(
                     testcase.get('fail_reason')))
             if logs:
                 core.print_info("     {0}".format(testcase.get('logfile')))
             core.print_info(emptydelimiter)
Example #26
import common
import core

import mtf_scheduler
from environment_prepare.docker_prepare import EnvDocker
from environment_prepare.rpm_prepare import EnvRpm
from environment_prepare.nspawn_prepare import EnvNspawn
from environment_prepare.openshift_prepare import EnvOpenShift

# I'm too lazy to write my own argument parser here; reuse the one from mtf_scheduler.
args, unknown = mtf_scheduler.cli()
if unknown:
    raise ValueError("unsupported option(s): {0}".format(unknown))

module_name = common.get_module_type_base()
core.print_info("Setting environment for module: {} ".format(module_name))

if module_name == "docker":
    env = EnvDocker()
elif module_name == "rpm":
    env = EnvRpm()
elif module_name == "nspawn":
    env = EnvNspawn()
elif module_name == "openshift":
    env = EnvOpenShift()


def mtfenvset():
    core.print_info("Preparing environment ...")
    # prepare_env has a separate implementation for each backend: EnvDocker/EnvRpm/EnvNspawn
    env.prepare_env()