def test_execute(self):
     (output, rc) = execute("false", failok=True)
     self.assertEqual(rc, 1)
     self.assertEqual(len(output), 1)
     self.assertEqual(len(output[0]), 0)
     with self.assertRaises(CommandFailed):
         execute("false", failok=False)
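
The examples on this page all go through an execute() helper from the vamos tools module that runs a shell command and hands back its output together with the exit code. The helper itself is not part of this listing; the following is only a minimal sketch that is consistent with the test above (the CommandFailed signature, the 'echo' flag and the line-splitting behaviour are assumptions, not the project's actual implementation):

import logging
import subprocess


class CommandFailed(RuntimeError):
    """Raised when a command exits non-zero and failok is False."""

    def __init__(self, command, returncode, stdout):
        RuntimeError.__init__(self, "'%s' failed with exit code %d"
                              % (command, returncode))
        self.command = command
        self.returncode = returncode
        self.stdout = stdout


def execute(command, failok=True, echo=False):
    """Run 'command' in a shell and return (output lines, exit code)."""
    if echo:
        logging.debug("executing: %s", command)
    proc = subprocess.Popen(command, shell=True,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    (stdout, _) = proc.communicate()
    # splitting on "\n" yields a single empty line for a silent command,
    # which is what the test above expects
    lines = stdout.decode("utf-8", "replace").split("\n")
    if proc.returncode != 0 and not failok:
        raise CommandFailed(command, proc.returncode, lines)
    return (lines, proc.returncode)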
Example #2
 def parse_blocks(path):
     """Parse C source file and return a dictionary of
     blocks {block id:  block}."""
     blocks = {}
     try:
         (output, _) = tools.execute("undertaker -j blockrange %s" % path,
                                     failok=False)
     except tools.CommandFailed:
         return blocks
     for out in output:
         block = Block(path)
         split = out.split(":")
         block.bid = split[1]
         block.range = (int(split[2]), int(split[3]))
         block.new_range = block.range
         if block.range[0] != 0:
             (precond, _) = tools.execute("undertaker -j blockpc %s:%i:1" %
                                          (path, block.range[0] + 1))
             block.precondition = precond
             for pre in precond:
                 block.ref_items.update(tools.get_kconfig_items(pre))
         # Add the file variable to the list of referenced items in order to
         #  make it visible to block.get_transitive_items()
         block.ref_items.add("FILE_" +
                             kbuild.normalize_filename(block.srcfile))
         blocks[block.bid] = block
     return blocks
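
A hypothetical use of parse_blocks(); the source file name is illustrative and the 'undertaker' binary is assumed to be on the PATH:

blocks = parse_blocks("kernel/sched/core.c")   # hypothetical path
for bid in sorted(blocks):
    block = blocks[bid]
    print("block %s: lines %d-%d, %d referenced items" %
          (bid, block.range[0], block.range[1], len(block.ref_items)))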
Example #3
def coreboot_get_config_for(subarch=None):
    if subarch is not None:
        subarch_regex = re.compile("([a-zA-Z0-9-]+)/([a-zA-Z_0-9_-]+)")
        m = subarch_regex.match(subarch)

        if m:
            vendor = m.group(1)
            mainboard = m.group(2)
            logging.debug("Using Vendor '%s', Mainboard '%s'", vendor,
                          mainboard)

            cmd = './util/abuild/abuild -B -C -t %s/%s' % (vendor, mainboard)
            if not os.path.isdir('./coreboot-builds/%s_%s' %
                                 (vendor, mainboard)):
                execute(cmd, failok=True)

            if not os.path.isdir('./coreboot-builds/%s_%s' %
                                 (vendor, mainboard)):
                raise RuntimeError('%s failed. Maybe Vendor and/or Mainboard '
                                   'does not exist?' % cmd)
        else:
            raise RuntimeError('SUBARCH (%s) given but has invalid syntax, '
                               'use "Vendor/Mainboard" instead' % subarch)

        shutil.copy(
            './coreboot-builds/%s_%s/config.build' % (vendor, mainboard),
            '.config')
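
A hedged invocation; it assumes a coreboot checkout with './util/abuild/abuild' available, and the Vendor/Mainboard value is purely illustrative:

coreboot_get_config_for("emulation/qemu-i440fx")
# on success, the board's generated configuration has been copied to '.config'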
Example #4
    def process(self, parser, args, dirs_to_process):
        """ For Busybox, we need to generate the full Makefiles before parsing
        them by running 'make gen_build_files'. Additionally, initialize the
        list of top-level directories."""

        # Prepare tree to contain processed Kbuild files
        Tools.execute("make gen_build_files", failok=False)

        parser.global_vars.create_variable("no_config_nesting", 0)

        if len(args.directory) > 0:
            # User provided directories have no precondition
            for item in args.directory:
                dirs_to_process[item] = DataStructures.Precondition()
        else:
            # Default directories have no precondition. Find them by parsing the
            # top-level Makefile for a line starting with "libs-y :=" which does
            # not contain $(libs-y1) (i.e. don't consider the internal lists
            # which are constructed to call patsubst on etc.)
            with open(self.get_file_for_subdirectory("."), "r") as infile:
                while True:
                    (good, line) = Tools.get_multiline_from_file(infile)
                    if not good:
                        break
                    if line.startswith(
                            "libs-y\t") and not "$(libs-y1)" in line:
                        subdirs = [x for x in line.split() if x]
                        # Drop the first two tokens ("libs-y" and "+=") and
                        # initialize the dictionary with the remaining ones.
                        for subdir in subdirs[2:]:
                            dirs_to_process[
                                subdir] = DataStructures.Precondition()
                        break
Example #5
def check_code_defect(block):
    """Check the code defect and extend its defect report."""
    # report the block's boolean precondition
    reason = "Contradiction"
    if "undead" in block.defect:
        reason = "Tautology"
    block.report += "\n\t%s in the block's precondition:" % reason

    for cond in block.precondition:
        block.report += "\n\t%s" % cond

    cpp_items = []

    # find previously defined CPP items (e.g., #define CONFIG_)
    (output, _) = tools.execute(r"git grep -n '^\s*#def' %s" % block.srcfile)
    for out in output:
        feature = tools.get_kconfig_items(out)
        if feature and feature[0] in block.ref_items:
            cpp_items.append(out)

    # find previously undefined CPP items (e.g., #undefine CONFIG_)
    (output, _) = tools.execute(r"git grep -n '^\s*#undef' %s" % block.srcfile)
    for out in output:
        feature = tools.get_kconfig_items(out)
        if feature and feature[0] in block.ref_items:
            cpp_items.append(out)

    if cpp_items:
        block.report += "\n\n\tThe following lines of source code may cause "
        block.report += "the defect:"
        for item in sorted(cpp_items):
            block.report += "\n\t\t%s" % item
Example #6
def get_loc_coverage(filename, autoconf_h=None):
    """
    Returns LOC of the given file taking the current configuration into account

    If the parameter 'autoconf_h' is set to an existing file, then the
    source file is preprocessed with 'cpp' with the given
    configuration. Usually, this will be some 'autoconf.h'. If it is not
    set, then the file will not be preprocessed at all.

    The given filename is stripped from '#include' directives, and
    (optionally) configured with a configuration file.

    ..note: For Linux, use the method 'find_autoconf()' from the
            vamos.golem.kbuild package to find a suitable autoconf_h

    ..note: Use '/dev/null' for an empty configuration

    """

    assert(os.path.exists(filename))
    cmd = r"grep -v -E '^\s*#\s*include' %s " % filename

    if autoconf_h and os.path.exists(autoconf_h):
        cmd += ' | cpp -include %s' % autoconf_h

    (lines, _) = execute(cmd, echo=True, failok=True)
    # we ignore the exitcode here as we are not interested in failing
    # for #error directives and similar.
    return len(lines)
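
A usage sketch; both paths are assumptions about the tree being analysed:

total_loc = get_loc_coverage("init/main.c")
configured_loc = get_loc_coverage("init/main.c",
                                  autoconf_h="include/generated/autoconf.h")
print("%d of %d lines remain after preprocessing" % (configured_loc, total_loc))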
Example #7
def call_makefile_generic(target, failok=False, dryrun=False, **kwargs):
    """
    Invokes 'make'.

    This variant is intended to work in a generic way. For project
    specific adaptations, a wrapper function may call this.

    If dryrun is True, then the command line is returned instead of the
    command's execution output. This is mainly useful for testing.

    returns a tuple with
     1. the command's standard output as list of lines
     2. the exitcode
    """

    njobs = kwargs.get('njobs', None)

    if njobs is None:
        njobs = int(os.sysconf('SC_NPROCESSORS_ONLN') * 1.20 + 0.5)

    cmd = "env %(extra_env)s make -j%(njobs)s %(target)s %(extra_variables)s " %\
        {
        'target': target,
        'njobs': njobs,
        'extra_env': kwargs.get('extra_env', ""),
        'extra_variables': kwargs.get('extra_variables', ""),
        }

    if dryrun:
        return (cmd, 0)
    else:
        return execute(cmd, failok=failok)
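
Since dryrun=True returns the assembled command line instead of running it, the result is easy to inspect; a small sketch (target and variables are illustrative):

(cmd, rc) = call_makefile_generic("vmlinux",
                                  dryrun=True,
                                  extra_env="ARCH=x86_64",
                                  extra_variables="V=1")
assert rc == 0
print(cmd)   # e.g. "env ARCH=x86_64 make -j<N> vmlinux V=1 "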
Example #8
def get_loc_coverage(filename, autoconf_h=None):
    """
    Returns LOC of the given file taking the current configuration into account

    If the parameter 'autoconf_h' is set to an existing file, then the
    source file is preprocessed with 'cpp' with the given
    configuration. Usually, this will be some 'autoconf.h'. If it is not
    set, then the file will not be preprocessed at all.

    The given filename is stripped from '#include' directives, and
    (optionally) configured with a configuration file.

    ..note: For Linux, use the method 'find_autoconf()' from the
            vamos.golem.kbuild package to find a suitable autoconf_h

    ..note: Use '/dev/null' for an empty configuration

    """

    assert (os.path.exists(filename))
    cmd = r"grep -v -E '^\s*#\s*include' %s " % filename

    if autoconf_h and os.path.exists(autoconf_h):
        cmd += ' | cpp -include %s' % autoconf_h

    (lines, _) = execute(cmd, echo=True, failok=True)
    # we ignore the exitcode here as we are not interested in failing
    # for #error directives and similar.
    return len(lines)
Example #9
 def cleanup_autoconf_h(self):
     """
     deletes various autoconf.h files
     returns a list of deleted files
     """
     (files, _) = execute("find build -name config.h -print -delete",
                          failok=False)
     return files
Example #10
def apply_configuration(arch=None, subarch=None, filename=None):
    """
    Applies the current configuration

    This method updates 'include/config/auto.conf' and
    'include/generated/autoconf.h' to match the current configuration.
    Expects a complete configuration in '.config'. If it does not exist,
    the standard configuration 'allyesconfig' is configured.

    If not applying for the default architecture 'x86', the optional
    parameters "arch" (and possibly "subarch") need to be specified.

    If an optional filename is passed, then architecture and subarch are
    guessed using the guess_arch_from_filename() function. Overriding
    either arch or subarch remains possible by explicitly setting arch
    or subarch.
    """

    if filename:
        (guessed_arch, guessed_subarch) = guess_arch_from_filename(filename)
        if not arch:
            arch = guessed_arch
        if not subarch:
            subarch = guessed_subarch

    if not arch:
        logging.warning("No architecture selected. Defaulting to x86")
        arch = 'x86'

    if not os.path.exists(".config"):
        call_linux_makefile("allyesconfig", arch=arch, subarch=subarch)

    # this catches unset defaults. Since boolean and tristate have
    # implicit defaults, this can effectively only happen for integer
    # and hex items. Both are fine with a setting of '0'
    execute('sed -i s,=$,=0,g .config', failok=False)
    try:
        call_linux_makefile('silentoldconfig',
                            arch=arch,
                            subarch=subarch,
                            failok=False)
    except CommandFailed as e:
        if e.returncode == 2:
            raise TreeNotConfigured("target 'silentoldconfig' failed")
        else:
            raise
Example #11
 def cleanup_autoconf_h(self):
     """
     deletes various autoconf.h files
     returns a list of deleted files
     """
     (files,
      _) = tools.execute("find include -name autoconf.h -print -delete",
                         failok=False)
     return files
Example #12
    def OP_list(self, selection):
        """
        to be run in a fiasco source tree

        The parameter 'selection' represents a (partial) config selection
        that can be converted into a dict from feature -> value, e.g.:
        {'CONFIG_X86': 'y', 'CONFIG_BARFOO': 'm'}.

        @return a tuple of ([variability_implementation],
                            [point_of_variability]).
        """

        features = selection.to_dict()

        arch = features.get("CONFIG_XARCH", None)
        if arch in ["arm", "ppc32"]:
            bsp = features.get("CONFIG_BSP_NAME", None)
            if bsp not in self.BSP_dict[arch]:
                return (set(), set(["src/Modules.%s" % arch]))

        if features.get("CONFIG_MP", None) is not None:
            features["MPCORE_PHYS_BASE"] = "23"

        scriptsdir = kbuild.find_scripts_basedir()
        assert (os.path.exists(os.path.join(scriptsdir,
                                            'Makefile.list_fiasco')))

        fd = NamedTemporaryFile()
        logging.debug("dumping partial configuration with %d items to %s",
                      len(features.items()), fd.name)
        for (key, value) in features.items():
            fd.write("%s=%s\n" % (key, value))
            logging.debug("%s=%s", key, value)
        fd.flush()

        make = "make -f %(basedir)s/Makefile.list_fiasco auto_conf=%(tempfile)s" % \
            {'basedir': scriptsdir,
             'tempfile': fd.name}

        (stdout, ret) = tools.execute(make)
        assert ret == 0
        # build a dict {variable: value} from lines like "MAKEFILE_LIST ..."
        stdout = dict([tuple((x + " ").split(" ", 1)) for x in stdout])
        if not stdout.has_key("MAKEFILE_LIST"):
            raise tools.CommandFailed("Makefile.list_fiasco", -1, stdout)
        if not stdout.has_key("PREPROCESS_PARTS"):
            raise tools.CommandFailed("Makefile.list_fiasco", -1, stdout)

        var_impl = {
            x
            for x in stdout["PREPROCESS_PARTS"].split() if len(x) > 0
        }
        var_points = {
            x
            for x in stdout["MAKEFILE_LIST"].split()
            if len(x) > 0 and x != fd.name
        }
        return (var_impl, var_points)
Example #13
def find_blocks(cmd, regex):
    (output, _) = execute(cmd, failok=False)
    blocks = set()
    blocks.add(0)
    for line in output:
        m = re.match(regex, line)
        if m:
            blocks.add(int(m.group(1)))

    return blocks
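
find_blocks() is typically fed a block-listing command; a hedged example, where both the command and the regular expression are only assumptions about the tool's output format (lines such as "B0 12 34"):

blocks = find_blocks('zizler -c "init/main.c"', r"^B(\d+)")
print("found blocks: %s" % sorted(blocks))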
Example #14
def find_blocks(cmd, regex):
    (output, _) = execute(cmd, failok=False)
    blocks = set()
    blocks.add(0)
    for line in output:
        m = re.match(regex, line)
        if m:
            blocks.add(int(m.group(1)))

    return blocks
Example #15
 def __call_compiler(self, compiler, args, on_file):
     cmd = compiler + " " + args
     if compiler in self.framework.options['args']:
         cmd += " " + self.framework.options['args'][compiler]
     cmd += " " + self.get_cppflags()
     cmd += " '" + on_file + "'"
     (out, returncode) = tools.execute(cmd, failok=True)
     if returncode == 127:
         raise RuntimeError(compiler + " not found on this system?")
     else:
         return (out, returncode)
Example #16
 def __call_compiler(self, compiler, args, on_file):
     cmd = compiler + " " + args
     if compiler in self.framework.options['args']:
         cmd += " " + self.framework.options['args'][compiler]
     cmd += " " + self.get_cppflags()
     cmd += " '" + on_file + "'"
     (out, returncode) = execute(cmd, failok=True)
     if returncode == 127:
         raise RuntimeError(compiler + " not found on this system?")
     else:
         return (out, returncode)
Example #17
def get_conditional_blocks(filename,
                           autoconf_h=None,
                           all_cpp_blocks=False,
                           strip_linums=True):
    """
    Counts the conditional blocks in the given source file

    The calculation is done using the 'zizler' program from the system
    path.

    If the parameter 'autoconf_h' is set to an existing file, then the
    source file is preprocessed with 'cpp' with the given
    configuration. Usually, this will be some 'autoconf.h'. For Linux,
    this can be detected with the method find_autoconf() from the golem
    package.

    If the parameter all_cpp_blocks is set to false, only configuration
    controlled conditional blocks will be counted, otherwise all
    blocks. Configuration controlled conditional blocks are blocks with
    a CPP expression that contains at least one CPP identifier starting
    with 'CONFIG_'.

    Implementation detail: This function will use the 'zizler -cC'
    command if all_cpp_blocks is set to false, and 'zizler -c' if set to
    true.

    @return a non-empty list of blocks found in the source file

    """

    if all_cpp_blocks:
        normalizer = 'zizler -c "%s"' % filename
    else:
        normalizer = 'zizler -cC "%s"' % filename

    if autoconf_h and os.path.exists(autoconf_h):
        cmd = '%s | cpp -include %s' % (normalizer, autoconf_h)
    else:
        cmd = normalizer
    (stdout, rc) = execute(cmd, echo=True, failok=True)

    blocks = filter(lambda x: len(x) != 0 and x.startswith("B"), stdout)
    # With newer versions of zizler, line numbers for each block are
    # also printed. By default they are stripped, to retain backward
    # compatibility
    #  "B00 23" -> "B00"
    if strip_linums and len(blocks) > 0 and " " in blocks[0]:
        blocks = [x.split(" ", 1)[0] for x in blocks]
    if rc != 0:
        logging.warning("'%s' signals exitcode: %d", cmd, rc)
        if rc == 127:
            raise CommandFailed(cmd, 127, stdout)
    return blocks
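
A usage sketch; the file paths are assumptions and 'zizler' must be on the PATH:

# all configuration-controlled blocks in the unpreprocessed file
all_blocks = get_conditional_blocks("kernel/fork.c")
# blocks that survive preprocessing with the current configuration
selected = get_conditional_blocks("kernel/fork.c",
                                  autoconf_h="include/generated/autoconf.h")
print("%d of %d conditional blocks selected" % (len(selected), len(all_blocks)))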
Example #18
    def OP_list(self, selection):
        """
        to be run in a fiasco source tree

        The parameter 'selection' represents a (partial) config selection
        that can be converted into a dict from feature -> value, e.g.:
        {'CONFIG_X86': 'y', 'CONFIG_BARFOO': 'm'}.

        @return a tuple of ([variability_implementation],
                            [point_of_variability]).
        """

        features = selection.to_dict()

        arch = features.get("CONFIG_XARCH", None)
        if arch in ["arm", "ppc32"]:
            bsp = features.get("CONFIG_BSP_NAME", None)
            if bsp not in self.BSP_dict[arch]:
                return (set(), set(["src/Modules.%s" % arch]))

        if features.get("CONFIG_MP", None) is not None:
            features["MPCORE_PHYS_BASE"] = "23"

        scriptsdir = find_scripts_basedir()
        assert(os.path.exists(os.path.join(scriptsdir, 'Makefile.list_fiasco')))

        fd = NamedTemporaryFile()
        logging.debug("dumping partial configuration with %d items to %s", len(features.items()), fd.name)
        for (key, value) in features.items():
            fd.write("%s=%s\n" % (key, value))
            logging.debug("%s=%s", key, value)
        fd.flush()

        make = "make -f %(basedir)s/Makefile.list_fiasco auto_conf=%(tempfile)s" % \
            {'basedir': scriptsdir,
             'tempfile': fd.name}

        try:
            (stdout, ret) = execute(make)
            assert ret == 0
            stdout = dict([tuple((x + " ").split(" ", 1)) for x in stdout])
            if not stdout.has_key("MAKEFILE_LIST"):
                raise CommandFailed("Makefile.list_fiasco", -1, stdout)
            if not stdout.has_key("PREPROCESS_PARTS"):
                raise CommandFailed("Makefile.list_fiasco", -1, stdout)

            var_impl = set([x for x in stdout["PREPROCESS_PARTS"].split()
                            if len(x) > 0])
            var_points = set([x for x in stdout["MAKEFILE_LIST"].split()
                              if len(x) > 0 and x != fd.name])
            return (var_impl, var_points)
        except:
            raise
Example #19
def get_conditional_blocks(filename, autoconf_h=None, all_cpp_blocks=False,
                           strip_linums=True):
    """
    Counts the conditional blocks in the given source file

    The calculation is done using the 'zizler' program from the system
    path.

    If the parameter 'autoconf_h' is set to an existing file, then the
    source file is preprocessed with 'cpp' with the given
    configuration. Usually, this will be some 'autoconf.h'. For Linux,
    this can be detected with the method find_autoconf() from the golem
    package.

    If the parameter all_cpp_blocks is set to false, only configuration
    controlled conditional blocks will be counted, otherwise all
    blocks. Configuration controlled conditional blocks are blocks with
    a CPP expression that contains at least one CPP identifier starting
    with 'CONFIG_'.

    Implementation detail: This function will use the 'zizler -cC'
    command if all_cpp_blocks is set to false, and 'zizler -c' if set to
    true.

    @return a non-empty list of blocks found in the source file

    """

    if all_cpp_blocks:
        normalizer = 'zizler -c "%s"' % filename
    else:
        normalizer = 'zizler -cC "%s"' % filename

    if autoconf_h and os.path.exists(autoconf_h):
        cmd = '%s | cpp -include %s' % (normalizer, autoconf_h)
    else:
        cmd = normalizer
    (stdout, rc) = execute(cmd, echo=True, failok=True)

    blocks = filter(lambda x: len(x) != 0 and x.startswith("B"), stdout)
    # With newer versions of zizler, line numbers for each block are
    # also printed. By default they are stripped, to retain backward
    # compatibility
    #  "B00 23" -> "B00"
    if strip_linums and len(blocks) > 0 and " " in blocks[0]:
        blocks = [x.split(" ", 1)[0] for x in blocks]
    if rc != 0:
        logging.warning("'%s' signals exitcode: %d", cmd, rc)
        if rc == 127:
            raise CommandFailed(cmd, 127, stdout)
    return blocks
Example #20
    def apply_black_white_lists(self, ignoreset):
        """
        This function creates the "allno.config" and "allyes.config" files in
        the root directory of the current tree. They ensure that, when calling
        the respective "make all{no,yes}config", the always_off_items are
        always off and the always_on_items are always on, even when "make
        all{no,yes}config" is called without using the framework.

        Be careful though: for items without a prompt in Kconfig, the
        behaviour described above CANNOT be guaranteed!
        """

        # Sanity check
        if len(self.always_on_items & self.always_off_items) > 0:
            raise RuntimeError("Intersection between always_on_items and "
                               "always_off_items is non-empty: %s" %
                               (self.always_on_items & self.always_off_items))

        del_cmds = list()
        for item in self.always_on_items | self.always_off_items:
            if any([i in item for i in ignoreset]): continue
            del_cmds.append("/^%s=/d" % item)
        if len(del_cmds) > 0:
            sed_commands = ";".join(del_cmds)
            execute("sed '%s' .config > allno.config" % sed_commands)
            self.call_makefile("allnoconfig")

        del_cmds = list()
        for item in self.always_on_items:
            if any([i in item for i in ignoreset]): continue
            del_cmds.append("/^%s=/d" % item)
        if len(del_cmds) > 0:
            sed_commands = ";".join(del_cmds)
            execute("sed '%s' .config > allyes.config" % sed_commands)
            self.call_makefile("allyesconfig")
        else:
            self.call_makefile("silentoldconfig")
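
The generated sed script simply deletes every assignment of a listed item from '.config'; 'make all{no,yes}config' then re-adds the removed items as 'n' or 'y'. A small illustration of the command that gets built (the item names are hypothetical):

del_cmds = ["/^CONFIG_FOO=/d", "/^CONFIG_BAR=/d"]   # hypothetical items
sed_commands = ";".join(del_cmds)
print("sed '%s' .config > allno.config" % sed_commands)
# -> sed '/^CONFIG_FOO=/d;/^CONFIG_BAR=/d' .config > allno.config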
Example #21
def find_autoconf():
    """ returns the path to the autoconf.h file in this linux tree
    """

    if vamos.golem.autoconf_h:
        return vamos.golem.autoconf_h

    (autoconf, _) = execute("find include -name autoconf.h", failok=False)
    autoconf = [x for x in autoconf if len(x) > 0]
    if len(autoconf) != 1:
        logging.error("Found %d autoconf.h files (%s)", len(autoconf),
                      ", ".join(autoconf))
        raise RuntimeError("Not exactly one autoconf.h was found")
    vamos.golem.autoconf_h = autoconf[0]
    return vamos.golem.autoconf_h
Example #22
def defect_analysis(srcfile, models, flag=""):
    """Defect analysis using the Undertaker tool.
    Returns list of defect reports."""
    reports = []
    defect_pattern = re.compile(r"[\S]+\.[cSh]\.B[0-9]+[\S]+")
    (output, _) = tools.execute("undertaker -v -m %s %s %s" %
                                (models, srcfile, flag), failok=True)
    for report in output:
        if not report.startswith("I:"):
            continue
        matches = defect_pattern.findall(report)
        if matches:
            defect = matches[0].strip()
            reports.append(defect)
    return reports
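
A hedged call, assuming the 'undertaker' tool and a model file are available; both paths are illustrative:

reports = defect_analysis("drivers/net/dummy.c", "models/x86.model")
for defect in reports:
    print("defect report: %s" % defect)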
Example #23
def get_coreboot_version():
    """
    Check that the current working directory is actually a Coreboot tree.

    If we are in a git tree or a tarball with build/config.h, return that
    coreboot version if it starts with "4.", otherwise raise a
    NotACorebootTree exception; return "coreboot-UNKNOWN" if in a bare tarball.
    """

    if not (os.path.exists('Makefile') and os.path.exists('Makefile.inc') \
            and os.path.exists('src/Kconfig')):
        raise NotACorebootTree(
            "No 'Makefile', 'Makefile.inc' or 'src/Kconfig' found")

    if os.path.isdir('.git'):
        cmd = "git describe"
        (output, ret) = execute(cmd)
        if ret > 0 or not output:
            git_version = ""
            logging.debug(
                "Execution of '%s' command failed, analyzing the Makefile instead",
                cmd)
        else:
            git_version = output[0]
        # 'standard' Coreboot repository descriptions start with 4.
        if git_version.startswith("4."):
            return git_version
        raise NotACorebootTree("Only 4.x versions are supported, but not %s"
                               % git_version)

    if os.path.exists('build/config.h'):
        regx = re.compile(r" \* coreboot version: ([a-zA-Z_0-9_.-]+)")
        with open('build/config.h') as conf:
            version = None
            for line in conf:
                if regx.match(line):
                    m = regx.match(line)
                    version = m.group(1)
                    break

        # 'standard' Coreboot repository descriptions start with 4.
        if version and version.startswith("4."):
            return version
        raise NotACorebootTree("Only 4.x versions are supported, but not %s"
                               % version)

    # at this stage, we are "sure" to have a coreboot tree, since Makefile and
    # Makefile.inc and src/Kconfig files exist, but no valid version is known
    return "coreboot-UNKNOWN"
Example #24
def get_busybox_version():
    """
    Check that the current working directory is actually a Busybox tree

    If we are in a git tree, return that busybox version. Otherwise,
    use a custom Makefile to retrieve the current busybox version.

    Raises a 'NotABusyboxTree' exception if the version could not be retrieved.
    """

    scriptsdir = find_scripts_basedir()

    if not os.path.exists('Makefile'):
        raise NotABusyboxTree("No 'Makefile' found")

    if os.path.isdir('.git'):
        cmd = "git describe"
        (output, ret) = execute(cmd)
        if ret > 0 or not output:
            git_version = ""
            logging.debug(
                "Execution of '%s' command failed, analyzing the Makefile instead",
                cmd)
        else:
            git_version = output[0]

        # 'standard' Busybox repository descriptions start with 1_
        if git_version.startswith("1_"):
            return git_version
        else:
            raise NotABusyboxTree(
                "Git does not indicate a supported busybox version")

    extra_vars = "-f %(basedir)s/Makefile.version UNDERTAKER_SCRIPTS=%(basedir)s" % \
        { 'basedir' : scriptsdir }

    (output, ret) = call_makefile_generic('', extra_variables=extra_vars)
    if ret > 0:
        raise NotABusyboxTree("The call to Makefile.version failed")

    # use the last line; if the tree is not configured we get additional
    # warning messages before it
    version = output[-1]
    if not version.startswith("1."):
        raise NotABusyboxTree("Only 1.x versions are supported, but not %s"
                              % version)
    else:
        return version
Example #25
    def get_transitive_items(self, model):
        """Return a sorted list of all referenced items and items that are in
        the block's dependencies of the specified model."""
        items = " ".join(self.ref_items)
        if not items:
            return []

        (deps, _) = tools.execute("undertaker -j interesting -m %s %s" %
                                  (model.path, items))
        items_set = set()
        for dep in deps:
            items_set.update(tools.get_kconfig_items(dep))

        # filter Undertaker internal choice items (e.g., 'CONFIG_CHOICE_42',
        # 'CONFIG_CHOICE_42_MODULE', 'CONFIG_CHOICE_42_META')
        choice_regex = re.compile(
            r"CONFIG\_CHOICE\_\d+((?:_MODULE)|(?:_META)){,1}$")
        return sorted(itertools.ifilterfalse(choice_regex.match, items_set))
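
The regular expression above removes undertaker's synthetic choice items; a small demonstration of which identifiers get filtered (the item names are made up):

import re

choice_regex = re.compile(r"CONFIG\_CHOICE\_\d+((?:_MODULE)|(?:_META)){,1}$")
items = ["CONFIG_NET", "CONFIG_CHOICE_42", "CONFIG_CHOICE_42_MODULE",
         "CONFIG_CHOICE_42_META", "CONFIG_CHOICE_OF_WEAPONS"]
print([i for i in items if not choice_regex.match(i)])
# -> ['CONFIG_NET', 'CONFIG_CHOICE_OF_WEAPONS']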
Example #26
def determine_buildsystem_variables(arch=None):
    """
    returns the set of kconfig variables that are mentioned in the tree's Makefiles
    """
    if arch == 'coreboot':
        cmd = r"find . \( -name Makefile.inc -o -name Makefile \) " + \
              r"-exec sed -n '/CONFIG_/p' {} \+"
    elif arch:
        cmd = r"find . \( -name Kbuild -o -name Makefile \) " + \
              r"\( ! -path './arch/*' -o -path './arch/%(arch)s/*' \) " + \
              r"-exec sed -n '/CONFIG_/p' {} \+"
        cmd = cmd % {'arch': arch}
    else:
        cmd = r"find . \( -name Kbuild -o -name Makefile \) -exec sed -n '/CONFIG_/p' {} \+"
    find_result = execute(cmd, failok=False)

    ret = set()
    for line in find_result[0]:
        for m in re.finditer(BUILDSYSTEM_VARIABLE_REGEX, line):
            config_variable = m.group('feature')
            ret.add(config_variable)
    return ret
Example #27
    def calculate_configurations(self, filename):
        """Calculate configurations for the given file

        returns a list of 'Configuration' objects that match the
        buildsystem class.
        """
        if self.options.has_key(
                'keep_configurations') and self.options['keep_configurations']:
            return self.verify_configurations(filename)
        cmd = "undertaker -q -j coverage -C %s -O combined" % self.options[
            'coverage_strategy']
        if self.options.has_key('model') and self.options['model']:
            cmd += " -m %s" % self.options['model']
        else:
            logging.info("No model specified, running without models")

        if self.options.has_key('whitelist'):
            cmd += " -W %s" % self.options['whitelist']

        if self.options.has_key('blacklist'):
            cmd += " -B %s" % self.options['blacklist']

        logging.info("Calculating configurations for '%s'", filename)
        if self.options and self.options.has_key('args'):
            if 'undertaker' in self.options['args']:
                cmd += " " + self.options['args']['undertaker']

        cmd += " '%s'" % filename.replace("'", "\\'")
        (output, statuscode) = execute(cmd, failok=True)
        if statuscode != 0 or any([l.startswith("E:") for l in output]):
            logging.error("Running undertaker failed: %s", cmd)
            print "--"
            for i in output:
                logging.error(i)

        return self.verify_configurations(filename)
Example #28
def call_linux_makefile(target,
                        extra_env="",
                        extra_variables="",
                        filename=None,
                        arch=None,
                        subarch=None,
                        failok=True,
                        dryrun=False,
                        njobs=None):
    # pylint: disable=R0912
    """
    Invokes 'make' in a Linux Buildtree.

    This utility function hides the details of how to set make and
    environment variables that influence kbuild. An important variable is
    'ARCH' (and possibly later 'SUBARCH'), which can be set via the
    corresponding parameters.

    If a target points to an existing file (or the optional target
    filename is given), the environment variable for ARCH is derived
    according to the following rules:

      - if the file is inside an "arch/$ARCHNAME/", use $ARCHNAME
      - if the "arch" variable is set, use that
      - by default use 'default_arch'
      - if the arch is set to 'x86', ARCH is set to 'x86_64', unless
        'vamos.prefer_32bit' is set, in which case 'i386' is used

    If dryrun is True, then the command line is returned instead of the
    command's execution output. This is mainly useful for testing.

    returns a tuple with
     1. the command's standard output as list of lines
     2. the exitcode
    """

    if extra_env and "ARCH=" in extra_env or extra_variables and "ARCH=" in extra_variables:
        logging.debug(
            "Detected manual (SUB)ARCH override in extra arguments '(%s, %s)'",
            extra_env, extra_variables)
    else:
        if os.path.exists(target):
            filename = target
        if filename:
            (guessed_arch,
             guessed_subarch) = guess_arch_from_filename(filename)

            if not arch:
                arch = guessed_arch

            if not subarch:
                subarch = guessed_subarch

    if not arch:
        (arch, subarch) = guess_arch_from_filename('Makefile')

    if not subarch:
        subarch = arch

    if arch == 'x86':
        # x86 is special - set default first
        if vamos.prefer_32bit:
            variant = 'i386'
        else:
            variant = 'x86_64'
        # do we need to override manually?
        if subarch == 'i386': variant = 'i386'
        if subarch == 'x86_64': variant = 'x86_64'
        extra_env += " ARCH=%s" % variant
    else:
        extra_env += " ARCH=%s" % arch
    extra_env += " SUBARCH=%s" % subarch

    if 'KERNELVERSION=' not in extra_variables:
        if not vamos.kernelversion:
            (output, rc) = execute("git describe", failok=True)
            if rc == 0:
                vamos.kernelversion = output[-1]
        if vamos.kernelversion:
            extra_env += ' KERNELVERSION="%s"' % vamos.kernelversion

    return call_makefile_generic(target,
                                 failok=failok,
                                 njobs=njobs,
                                 dryrun=dryrun,
                                 extra_env=extra_env,
                                 extra_variables=extra_variables)
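
Because dryrun is forwarded to call_makefile_generic(), the fully assembled command line can be inspected without touching the tree; a hedged sketch with an illustrative target:

(cmd, rc) = call_linux_makefile("allyesconfig",
                                arch="arm", subarch="arm",
                                dryrun=True)
print(cmd)   # e.g. "env  ARCH=arm SUBARCH=arm ... make -j<N> allyesconfig ..."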
Example #29
 def __init__(self, directory_prefix=""):
     LinuxInferenceAtoms.__init__(self, "busybox", None)
     self.directory_prefix = directory_prefix
     execute("make gen_build_files", failok=False)