Example 1
    def getCanonicalPath( self ):
        p = PackageDetector( self._sourceTree )
        p.retrieveMakefileInfo()
        canonicalPath = os.path.join( p.packageCategory,
                                      p.packageName,
                                      p.packageVersion )
        return canonicalPath
    def _populatePackage(self, projectRoot, details):
        if details:
            Any.requireIsInstance(details, PackageDetector)
            self.details = details

        else:
            BuildSystemTools.requireTopLevelDir(projectRoot)

            logging.info('analyzing package... (this may take some time)')
            self.details = PackageDetector(projectRoot)
            self.details.retrieveMakefileInfo()
            self.details.retrieveVCSInfo()
Example 3
    def __init__(self,
                 projectRoot,
                 sitPath=None,
                 stdout=None,
                 stderr=None,
                 details=None):

        Any.requireIsDir(projectRoot)

        self.projectRoot = projectRoot
        logging.debug('topLevelDir=%s' % projectRoot)

        if details is None:
            details = PackageDetector(projectRoot)

            try:
                details.retrieveMakefileInfo()
            except EnvironmentError as e:
                logging.warning(e)  # e.g. $SIT not defined in environment

        else:
            Any.requireIsInstance(details, PackageDetector)

        if sitPath is None:
            sitPath = SIT.getPath()

        if FastScript.getEnv('MAKEFILE_DOC') == 'FALSE':
            handler = NullBackend
        elif details.docTool == 'doxygen':
            handler = DoxygenBackend
        elif details.docTool == 'matdoc':
            handler = MatlabBackend
        elif details.docTool == '':
            handler = NullBackend
        elif details.isMatlabPackage():

            # [CIA-1131] matdoc no longer works on xenial64
            #
            # On Ubuntu 16.04 disable documentation creation
            # (do not fall back to doxygen either, so as not to overwrite
            # the correct matdoc output from Ubuntu 14.04 machines)

            if getHostPlatform().startswith('xenial'):
                handler = NullBackend
            else:
                handler = MatlabBackend

        else:
            handler = DoxygenBackend

        self.backend = handler(details, sitPath, stdout, stderr)
Example 4
    def uninstall( self, cleanGlobalInstallation=True ):
        from ToolBOSCore.Packages import PackageCreator

        detector = PackageDetector( self._sourceTree )
        detector.retrieveMakefileInfo()

        try:
            PackageCreator.uninstall( detector.canonicalPath,
                                      cleanGlobalInstallation )
            return True

        except OSError as details:
            logging.error( details )
            return False
    def run(self):
        if 'category' not in self.values:
            self.values['category'] = 'External'

        if 'buildRules' not in self.values:
            self.values['buildRules'] = '''# This is a dummy file needed by various aux. scripts.
#
# The actual build instructions can be found in the compile.sh.
'''

        self.createMainPackage()

        srcDir = os.path.join(self.templateDir, 'External_without_compilation')
        dstDir = self.dstDir

        for fileName in FastScript.getFilesInDir(srcDir, '.php'):
            srcFile = os.path.join(srcDir, fileName)
            dstFile = os.path.join(dstDir, fileName)
            self.copyVerbatim(srcFile, dstFile)

        self.copyVerbatim(os.path.join(srcDir, 'pkgInfo.py'),
                          os.path.join(dstDir, 'pkgInfo.py'))

        FastScript.remove(os.path.join(dstDir, 'unittest.sh'))

        # create an exemplary (fake) tarball file and an interface symlink to it
        tarball = os.path.join(dstDir, 'src',
                               'Example-1.0-precompiled.tar.bz2')
        symlink = os.path.join(dstDir, 'src', 'package.tar.bz2')

        logging.info('processing %s' % tarball)
        FastScript.setFileContent(tarball, '')

        logging.info('processing %s' % symlink)
        os.symlink('Example-1.0-precompiled.tar.bz2', symlink)

        # create basic packageVar.cmake
        #
        # Note: calling FastScript.changeDirectory() with a relative path failed
        #       for some reason, continuing with an absolute path as a workaround
        dstDir = os.path.abspath(dstDir)
        Any.requireIsDir(dstDir)

        details = PackageDetector(dstDir)
        details.retrieveMakefileInfo()

        fileName = os.path.join(dstDir, 'packageVar.cmake')
        PackageVarCmakeWriter(details).write(fileName)
Example 6
def setEnv():
    """
        This function loads the pkgInfo.py of each dependency package.
        If environment settings are found there they will be loaded into
        the environment of the current Python process.

        Example:
            The pkgInfo.py of Matlab states MATLAB_ROOT, PATH and
            LD_LIBRARY_PATH settings. Before compiling, such variables
            must be set.

        On Linux this can alternatively be achieved by sourcing the
        BashSrc files. However, this is not possible in all cases when
        used within CIA (especially for Matlab, because multiple
        versions are available).

        With some modifications this setEnv() approach could
        conceptually also work on Windows.

        Attention: This function should only be called once (at least not
                   repeatedly when compiling the same package again from
                   within Python) to not unnecessarily increase the length
                   of PATH, LD_LIBRARY_PATH etc.
    """
    try:
        p = PackageDetector()
    except AssertionError:
        # XIF packages are generated on-the-fly during configure-phase.
        # We don't consider such packages for now (experimental code).
        return

    p.retrieveMakefileInfo()

    for package in p.dependencies + p.buildDependencies:
        try:
            envVars = PkgInfo.getPkgInfoContent( SIT.stripSIT(package) )['envVars']
        except ( AssertionError, KeyError ):
            envVars = []                         # no envVars specified
        except ( IOError, OSError, SyntaxError ) as details:
            logging.error( details )
            raise RuntimeError( 'unable to read pkgInfo.py of %s' % package )

        if envVars:
            logging.debug( 'found environment settings:' )
            logging.debug( envVars )

            for varName, varValue in envVars:
                FastScript.setEnv_withExpansion( varName, varValue )
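For illustration, a minimal sketch of what the 'envVars' entry in a dependency's pkgInfo.py could look like, matching the (varName, varValue) pairs that setEnv() iterates over. The variable names follow the Matlab example from the docstring; the concrete paths and the ${...} expansion syntax are assumptions, not taken from ToolBOS:

# hypothetical pkgInfo.py excerpt of a dependency package
envVars = [ ( 'MATLAB_ROOT',     '${SIT}/External/Matlab/8.4'            ),
            ( 'PATH',            '${MATLAB_ROOT}/bin:${PATH}'            ),
            ( 'LD_LIBRARY_PATH', '${MATLAB_ROOT}/lib:${LD_LIBRARY_PATH}' ) ]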
    def __init__(self, details=None):
        """
            Allows modifying an existing pkgInfo.py file, e.g. adding or
            removing entries.

            Multiple changes can be applied in a row. The actual re-writing
            of the file is done using write().

            We attempt to modify the package in the current working directory
            unless a PackageDetector instance is provided.
        """
        if details is not None:
            Any.requireIsInstance(details, PackageDetector)
            self._details = details
        else:
            self._details = PackageDetector()

        self._filePath = os.path.join(self._details.topLevelDir, 'pkgInfo.py')
        self._worker = PkgInfoWriter(self._details, sourceTree=True)

        try:
            self._content = FastScript.getFileContent(self._filePath)
            self.checkSyntax()
            self._data = getPkgInfoContent(dirName=self._details.topLevelDir)

        except IOError:
            self._content = self._worker.addLeadIn()

        except SyntaxError:
            raise
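A minimal usage sketch for this class, assuming the set() and write() methods that appear in the PatchSystem example further below; the dependency value is purely hypothetical and the import is omitted because the module path is not part of this listing:

p = PkgInfoInterface()                    # package in current working directory
p.set( 'buildDepends',                    # hypothetical key/value
       [ 'sit://DevelopmentTools/ToolBOSCore/3.0' ] )
p.write()                                 # re-writes the pkgInfo.py file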
    def run(self):
        if 'category' not in self.values:
            self.values['category'] = 'External'

        self.createMainPackage()

        srcDir = os.path.join(self.templateDir,
                              'External_CMake_out_of_tree_build')
        dstDir = self.dstDir

        for fileName in FastScript.getFilesInDir(srcDir, '.php'):
            srcFile = os.path.join(srcDir, fileName)
            dstFile = os.path.join(dstDir, fileName)
            self.copyVerbatim(srcFile, dstFile)

        self.copyVerbatim(os.path.join(srcDir, 'pkgInfo.py'),
                          os.path.join(dstDir, 'pkgInfo.py'))

        FastScript.remove(os.path.join(dstDir, 'unittest.sh'))

        # create an exemplary (fake) tarball file and an interface symlink to it
        tarball = os.path.join(dstDir, 'src', 'Example-1.0-src.tar.bz2')
        symlink = os.path.join(dstDir, 'src', 'sources.tar.bz2')

        logging.info('processing %s' % tarball)
        FastScript.setFileContent(tarball, '')

        logging.info('processing %s' % symlink)
        os.symlink('Example-1.0-src.tar.bz2', symlink)

        # create basic packageVar.cmake
        #
        # Note: calling FastScript.changeDirectory() with a relative path failed
        #       for some reason, continuing with an absolute path as a workaround
        dstDir = os.path.abspath(dstDir)
        Any.requireIsDir(dstDir)

        details = PackageDetector(dstDir)
        details.retrieveMakefileInfo()

        fileName = os.path.join(dstDir, 'packageVar.cmake')
        PackageVarCmakeWriter(details).write(fileName)
    def populate(self):
        """
            Scans all packages in the SIT and stores the ground-truth
            pkgInfo.py information into one giant hashtable for later
            fast access.

            The mapping is "packageURL": { pkgInfo data }
        """
        sitPath = SIT.getPath()
        canonicalPaths = SIT.getCanonicalPaths(sitPath)
        Any.requireIsListNonEmpty(canonicalPaths)

        for canonicalPath in canonicalPaths:
            ProjectProperties.requireIsCanonicalPath(canonicalPath)

            packageURL = 'sit://' + canonicalPath
            installRoot = os.path.join(sitPath, canonicalPath)
            detector = PackageDetector(installRoot)
            detector.retrieveMakefileInfo()

            self._cache[packageURL] = detector
def makeShellfiles(projectRoot):
    """
        Creates all the various BashSrc, pkgInfo.py etc. files.

        If <projectRoot>/<fileName> exists it will be copied instead of
        auto-generated. This allows providing fully handcrafted files if
        necessary.

        'projectRoot' is assumed to be a source code working copy
        (including the version, e.g. "/home/foo/mycode/Spam/42.0").

    """
    Any.requireIsDir(projectRoot)

    oldcwd = os.getcwd()
    FastScript.changeDirectory(projectRoot)

    # collect package details once (this function is internally multi-threaded)

    try:
        details = PackageDetector(projectRoot)
        details.retrieveMakefileInfo()
        details.retrieveVCSInfo()
    except AttributeError:
        raise ValueError('Unable to create shellfiles in path="%s", is this '
                         'a package directory?' % projectRoot)
    except ValueError as details:
        raise ValueError(details)

    FastScript.mkdir('./install')

    if os.path.exists('BashSrc'):
        logging.info('cp BashSrc ./install/')
        shutil.copy2('BashSrc', './install/BashSrc')
    else:
        BashSrcWriter(details).write('./install/BashSrc')

    if os.path.exists('CmdSrc.bat'):
        logging.info('cp CmdSrc.bat ./install/')
        shutil.copy2('CmdSrc.bat', './install/CmdSrc.bat')
    else:
        CmdSrcWriter(details).write('./install/CmdSrc.bat')

    # Note: pkgInfo.py is always generated (merged)
    PkgInfoWriter(details).write('./install/pkgInfo.py')

    if os.path.exists('packageVar.cmake'):
        logging.info('cp packageVar.cmake ./install/')
        shutil.copy2('packageVar.cmake', './install/packageVar.cmake')
    else:
        # try to generate a reasonable file (put explicitly under ./install/
        # to indicate it is an installation-temporary file)
        #
        # if the user wants to handcraft it, they could move this
        # auto-generated file to ./packageVar.cmake and add it to VCS
        PackageVarCmakeWriter(details).write('./install/packageVar.cmake')

    FastScript.changeDirectory(oldcwd)
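A usage sketch under the assumptions of the docstring above; the path is the example path from the docstring itself:

makeShellfiles( '/home/foo/mycode/Spam/42.0' )
# afterwards ./install/ contains BashSrc, CmdSrc.bat, pkgInfo.py and
# packageVar.cmake (handcrafted top-level files are copied verbatim)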
Example 11
    def open( self, topLevelDir ):
        self.detector = PackageDetector( topLevelDir )
        self.detector.retrieveMakefileInfo()
        self.url      = 'sit://' + self.detector.canonicalPath
Example 12
class BSTPackage( AbstractPackage ):
    """
        A software package in ToolBOS SDK scope, e.g. one that can be
        installed into the SIT and/or processed with "BST.py".
    """
    def __init__( self, url=None ):
        super( BSTPackage, self ).__init__( url )

        self.detector = None


    def getDepInstallCmd_APT( self ):
        """
            Returns the Debian/Ubuntu command-line to install all the
            dependencies found by retrieveDependencies().
        """
        from ToolBOSCore.Platforms import Debian

        Any.requireIsSet( self.depSet, 'Please call .retrieveDependencies() first' )

        if self.depSet:
            deps = list( self.depSet )
            deps.sort()
            return Debian.getDepInstallCmd( deps )
        else:
            return None


    def open( self, topLevelDir ):
        self.detector = PackageDetector( topLevelDir )
        self.detector.retrieveMakefileInfo()
        self.url      = 'sit://' + self.detector.canonicalPath


    def retrieveDependencies( self, recursive,
                              normalDeps=True, buildDeps=False,
                              recommendations=False, suggestions=False ):
        Any.requireIsNotNone( self.detector, 'Please call .open() first' )
        Any.requireIsBool( recursive )
        Any.requireIsBool( normalDeps )
        Any.requireIsBool( buildDeps )
        Any.requireIsBool( recommendations )
        Any.requireIsBool( suggestions )

        self.detector.retrieveMakefileInfo()

        self.depSet  = set()
        self.depTree = list()
        debPrefix    = 'deb://'
        sitPrefix    = 'sit://'
        hostPlatform = getHostPlatform()


        if normalDeps:
            self.depSet = set( self.detector.dependencies )

            try:
                self.depSet.update( self.detector.dependsArch[ hostPlatform ] )
            except KeyError:
                pass                             # no such setting, this is OK


        if buildDeps:
            self.depSet.update( self.detector.buildDependencies )

            try:
                self.depSet.update( self.detector.buildDependsArch[ hostPlatform ] )
            except KeyError:
                pass                             # no such setting, this is OK


        # iterate over a temporary copy of self.depSet, otherwise this
        # leads to "RuntimeError: Set changed size during iteration"
        for packageURL in copy.copy( self.depSet ):
            ProjectProperties.requireIsURL( packageURL )

            error = False

            if packageURL.startswith( sitPrefix ):
                depPkg = BSTProxyInstalledPackage( packageURL )
                try:
                    depPkg.open()

                    if recursive:
                        depPkg.retrieveDependencies( recursive, normalDeps, buildDeps,
                                                     recommendations, suggestions )
                    else:
                        depPkg.depSet  = set()
                        depPkg.depTree = list()
                except AssertionError as details:
                    logging.debug( details )
                    depPkg.depSet  = set()
                    depPkg.depTree = list()
                    error          = True

            elif packageURL.startswith( debPrefix ):
                depPkg = DebianPackage( packageURL )

                try:
                    depPkg.retrieveDependencies()
                except EnvironmentError as details:
                    logging.warning( details )
                    depPkg.depSet  = set()
                    depPkg.depTree = list()

            else:
                raise ValueError( 'Unknown URL prefix in "%s"' % packageURL )

            if not error:
                self.depSet.update( depPkg.depSet )

            self.depSet.add( packageURL )
            self.depTree.append( depPkg )
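The calling protocol implied by the assertions above ('Please call .open() first', 'Please call .retrieveDependencies() first') could look roughly like this; the working-copy path is hypothetical and imports are omitted since the module layout is not shown here:

pkg = BSTPackage()
pkg.open( '/home/foo/mycode/Spam/42.0' )            # hypothetical working copy
pkg.retrieveDependencies( recursive=True, buildDeps=True )
print( pkg.getDepInstallCmd_APT() )                 # Debian/Ubuntu install command, or None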
Example 13
    def __init__( self, topLevelDir=None ):
        if topLevelDir is None:
            topLevelDir = os.getcwd()

        self.details = PackageDetector( topLevelDir )
        self.details.retrieveMakefileInfo()
Example 14
class PatchSystem( object ):
    """
        Generic container for frequently changing statistics and patches
        that shall be executed within the Continuous Integration system.
    """
    def __init__( self, topLevelDir=None ):
        if topLevelDir is None:
            topLevelDir = os.getcwd()

        self.details = PackageDetector( topLevelDir )
        self.details.retrieveMakefileInfo()

    def _patchCIA681( self, dryRun=False ):
        """
            Replaces "make unpack" in pre-configure.sh by direct call to
            "UnpackSources.sh".
        """
        fileName = 'pre-configure.sh'
        old      = "make unpack"
        new      = "${TOOLBOSCORE_ROOT}/include/UnpackSources.sh"
        status   = self._replace( fileName, old, old, new, 'CIA-680', dryRun )

        if status:
            return [ fileName ]


    def _patchCIA727( self, dryRun=False ):
        """
            Upgrade XIF packages to SplitterBBCMMaker 1.3
        """
        # Part 1: update SplitterBBCMMaker version in pre-configure.sh

        fileName1 = 'pre-configure.sh'
        old       = 'SplitterBBCMMaker/1.2/bin/MakeDataWrapper.sh'
        new       = 'SplitterBBCMMaker/1.3/bin/MakeDataWrapper.sh'
        status    = self._replace( fileName1, old, old, new, 'CIA-727a', dryRun )


        # Part 2: update SplitterBBCMMaker version in post-install.sh

        if status:
            # Note: The old post-install.sh files contain a lot of
            # additional code. We agreed to completely replace such scripts
            # with a one-liner script

            fileName2 = 'post-install.sh'
            old       = 'SplitterBBCMMaker/1.2/bin/MakeSplitterBBCM.sh'
            new       = '#!/bin/bash\n' + \
                        '$SIT/DevelopmentTools/SplitterBBCMMaker/1.3/bin/MakeSplitterBBCM.sh ' + \
                        '%s . %s\n\n' % ( self.details.packageName,
                                          self.details.packageVersion )

            # this does a replacement within the file, which is undesired...
            status2 = self._replace( fileName2, old, old, new, 'CIA-727b', dryRun )

            if status2 and not dryRun:
                # ...instead we want to get rid of extra code and reset
                # the entire file content, effectively shortening the file
                FastScript.setFileContent( fileName2, new )

            return [ fileName1, fileName2 ]

        else:
            return False


    def _patchCIA765( self, dryRun=False ):
        """
            Upgrades the XIF package versions.
        """
        return self._dependencyUpgrade( dryRun, _getReplacementMap_XIF(), 'CIA-765' )


    def _patchCIA866( self, dryRun=False ):
        """
            Remove invalid pkgInfo.py from BBCMs / BBDMs / VMs that were
            erroneously added at a very early time when pkgInfo.py was
            introduced.
        """
        fileName = 'pkgInfo.py'
        modified = []

        if not self.details.packageCategory:
            logging.error( 'CMakeLists.txt: No such file' )
            return []

        if self.details.packageCategory.startswith( 'Modules' ) and \
           os.path.exists( fileName ):

            output = StringIO()
            modified.append( fileName )

            try:
                if not dryRun:
                    vcs = VersionControl.auto()
                    vcs.remove( fileName, output=output )
            except subprocess.CalledProcessError:
                pass        # maybe using git

        return modified


    # def _patchCIA868( self, dryRun=False ):
    #     """
    #         Replace old CMakeLists.txt files by new ones from the package
    #         templates in order to spread the new CMake build rules for Matlab
    #         wrapper generation.
    #     """
    #     fileName = 'wrapper/CMakeLists.txt'
    #     template = os.path.join( PackageCreator.templateDir,
    #                              'HDot_Component_Evaluation',
    #                              'CMakeLists.txt' )
    #
    #     Any.requireIsFileNonEmpty( template )
    #
    #     try:
    #         oldContent = FastScript.getFileContent( fileName )
    #     except IOError:
    #         # package is not affected (has no wrapper code)
    #         return False
    #
    #     if oldContent.find( 'bst_build_wrapper' ) == -1:
    #         shutil.copyfile( template, fileName )
    #         return [ fileName ]
    #     else:
    #         return False


    def _patchCIA923( self, dryRun=False ):
        """
            Updates the script path in pre-configure.sh from:
                ${TOOLBOSCORE_ROOT}/make/UnpackSources.sh
            to:
                ${TOOLBOSCORE_ROOT}/include/UnpackSources.sh

        """
        fileName = 'pre-configure.sh'
        old      = '${TOOLBOSCORE_ROOT}/make/UnpackSources.sh'
        new      = '${TOOLBOSCORE_ROOT}/include/UnpackSources.sh'
        status   = self._replace( fileName, old, old, new, 'CIA-923', dryRun )

        if status:
            return [ fileName ]


    def _patchCIA955( self, dryRun=False ):
        """
            BST.py requires CMake 2.8.8 for building library packages.

            However the CMakeLists.txt template stated a minimum required
            version of "2.6" which should be replaced by "2.8.8" accordingly.

            Otherwise the user will receive strange error messages when trying
            to compile with an older version than 2.8.8.
        """
        fileName = 'CMakeLists.txt'
        old      = 'cmake_minimum_required(VERSION 2.6)'
        new      = 'cmake_minimum_required(VERSION 2.8.8)'
        status   = False

        # This patch is only necessary if the package is about building
        # libraries

        try:
            content = FastScript.getFileContent( fileName )
        except IOError:
            logging.debug( '%s: No such file' % fileName )
            return False


        if 'bst_build_libraries' in content or 'add_library' in content:
            status = self._replace( fileName, old, old, new, 'CIA-955', dryRun )
        else:
            logging.debug( 'neither bst_build_libraries() nor add_library() found' )

        if status:
            return [ fileName ]


    def _patchCIA977( self, dryRun=False ):
        """
            Updates the Makefile of a BBCM component to call "MakeBBCM.py"
            instead of "RunTemplate.php".
        """
        if not self.details.isBBCM():
            logging.debug( 'package is not a BBCM' )
            return False

        srcFile    = os.path.join( PackageCreator.templateDir, 'C_BBCM', 'Makefile' )
        dstFile    = 'Makefile'

        if not os.path.exists( dstFile ):
            logging.debug( 'package has no Makefile, patch does not apply' )
            return False

        srcContent = FastScript.getFileContent( srcFile )
        dstContent = FastScript.getFileContent( dstFile )

        if srcContent != dstContent:
            if not dryRun:
                FastScript.copy( srcFile, dstFile )
            else:
                logging.debug( '[DRY-RUN] cp %s %s', srcFile, dstFile )

            return [ dstFile ]
        else:
            return False


    def _patchCIA982( self, dryRun=False ):
        """
            Check (and possibly set) a build-dependency if ToolBOSPluginMatlab
            is included within a separate CMakeLists.txt.
        """
        cmakeFile = 'wrapper/CMakeLists.txt'

        if not os.path.exists( cmakeFile ):
            logging.debug( '%s: No such file', cmakeFile )
            return False

        cmakeContent = FastScript.getFileContent( cmakeFile )
        Any.requireIsTextNonEmpty( cmakeContent )

        # check if dependencies included in the wrapper/CMakeLists.txt
        # appear in the normally detected depends/buildDepends sections
        #
        # if not, then add it at least as a build-dependency

        newBuildDeps = copy.copy( self.details.buildDependencies )

        for package in CMakeLists.getDependencies( cmakeContent ):
            Any.requireIsTextNonEmpty( package )

            if package not in self.details.dependencies and \
               package not in self.details.buildDependencies:
                logging.info( 'adding build-dependency: %s', package )

                newBuildDeps.append( package )


        # if some additional build-dependencies have been found then store
        # them into pkgInfo.py
        if self.details.buildDependencies != newBuildDeps:
            p = PkgInfoInterface( self.details )
            p.set( 'buildDepends', newBuildDeps )

            if not dryRun:
                p.write()

            return [ 'pkgInfo.py' ]
        else:
            return False


    def _patchCIA988( self, dryRun=False ):
        """
            Removes legacy PHP files from repository.
        """
        candidates = ( 'packageVar.php',
                       self.details.packageName + '.php' )  # in old BBCMs

        result     = []

        for fileName in candidates:
            if os.path.exists( fileName ):

                if not dryRun:
                    try:
                        vcs = VersionControl.auto()
                        vcs.remove( fileName )
                    except subprocess.CalledProcessError:
                        # maybe using git
                        FastScript.remove( fileName )

                result.append( fileName )

        return result


    def _patchCIA989( self, dryRun=False ):
        """
            Removes legacy ANY_DEF_*_TAG macros.
        """
        if self.details.canonicalPath.find( 'DevelopmentTools/ToolBOS' ) == -1 and \
           self.details.canonicalPath.find( 'Libraries/ToolBOSLib'     ) == -1 and \
           self.details.canonicalPath.find( 'Applications/ToolBOS'     ) == -1:

            result = []
            ticket = 'CIA-989'

            pkgNameUpper  = self.details.packageName.upper()
            srcTagPattern = re.compile( r'(ANY_DEF_SRCTAG\s?\(.*?\);)', re.DOTALL )

            for fileName in FastScript.getFilesInDir( 'src' ):
                fileName = os.path.join( 'src', fileName )
                modified = False
                new      = ''

                old = 'ANY_DEF_BINTAG;\n'
                if self._replace( fileName, old, old, new, ticket, dryRun ):
                    modified |= True


                old = '/* File tag */\n'
                if self._replace( fileName, old, old, new, ticket, dryRun ):
                    modified |= True


                old = '/*---------------------------------------------*/\n' + \
                      '/* File tag                                    */\n' + \
                      '/*---------------------------------------------*/\n'
                if self._replace( fileName, old, old, new, ticket, dryRun ):
                    modified |= True


                old = '#define %s_C_SRCTAG\n' % pkgNameUpper
                if self._replace( fileName, old, old, new, ticket, dryRun ):
                    modified |= True


                old = '#undef %s_C_SRCTAG\n' % pkgNameUpper
                if self._replace( fileName, old, old, new, ticket, dryRun ):
                    modified |= True


                content = FastScript.getFileContent( fileName )

                tmp = srcTagPattern.search( content )

                if tmp:
                    old = tmp.group(1)
                    if self._replace( fileName, old, old, new, ticket, dryRun ):
                        modified |= True


                # use the occasion and replace some more clutter
                old = '/* Temporary solution for a local debug level */\n'
                if self._replace( fileName, old, old, new, ticket, dryRun ):
                    modified |= True


                # remove too many empty lines left-over from previous replacements
                if modified:
                    old = '\n\n\n\n\n'
                    new = '\n\n'
                    self._replace( fileName, old, old, new, ticket, dryRun )

                    result.append( fileName )

            return result

        else:

            logging.info( 'skipping ToolBOS packages to not patch backward-compatibility layers' )

            return []


    def _patchCIA1094( self, dryRun=False ):
        """
            Replace fixed values of valid-flags by randomized value to
            discover memory problems.
        """
        old1      = '0x900db00f'
        old2      = '0xdeadb00f'
        result    = []
        ticket    = 'CIA-1094'
        whitelist = [ '.c', '.h', '.cpp', '.hpp', '.inc' ]


        def check( fileName ):
            Any.requireIsFile( fileName )

            content = FastScript.getFileContent( fileName )

            return content.find( old1 ) != -1 or \
                   content.find( old2 ) != -1



        for fileName in FastScript.findFiles( 'src', ext=whitelist ):
            modified = False


            while check( fileName ):
                new1, new2 = PackageCreator.randomizeValidityFlags()

                # search for boilerplate valid-flag
                if self._replace( fileName, old1, old1, new1, ticket, dryRun, 1 ):
                    modified |= True

                # search for boilerplate invalid-flag
                if self._replace( fileName, old2, old2, new2, ticket, dryRun, 1 ):
                    modified |= True


            if modified:
                result.append( fileName )


        return result


    def _patchCIA1112( self, dryRun=False ):
        """
            Replace legacy HRI_GLOBAL_ROOT variable by SIT, except for
            BBML graph files where this is part of the specification.
        """
        old       = 'HRI_GLOBAL_ROOT'
        new       = 'SIT'
        result    = []
        ticket    = 'CIA-1112'
        whitelist = [ '.bat', '.c', '.h', '.cpp', '.hpp', '.inc', '.py',
                      '.sh',
                      '.cmake',             # e.g. packageVar.cmake
                      ''                    # e.g. BashSrc (no extension)
                      ]


        # do not patch the ToolBOS package itself, otherwise this function
        # would be patched to replace "SIT" by "SIT" ;-)
        #
        # We generally assume that the ToolBOS packages themselves are
        # clean from legacy occurrences.

        if self.details.canonicalPath.find( 'DevelopmentTools/ToolBOS' ) == -1 and \
           self.details.canonicalPath.find( 'Applications/ToolBOS'     ) == -1:

            for fileName in FastScript.findFiles( '.', ext=whitelist ):
                modified = self._replace( fileName, old, old, new, ticket, dryRun )

                if modified:
                    result.append( fileName )

            return result

        else:

            logging.info( 'skipping ToolBOS packages to not patch backward-compatibility layers' )

            return []


    def _patchCIA1143( self, dryRun=False ):
        """
            Replace legacy C macro ANY_TNALLOC by ANY_NTALLOC for
            consistency with the parameter order.
        """
        old       = 'ANY_TNALLOC'
        new       = 'ANY_NTALLOC'
        result    = []
        ticket    = 'CIA-1143'
        whitelist = [ '.c', '.h', '.cpp', '.hpp', '.inc' ]


        # do not patch the ToolBOS package itself, otherwise this function
        # would patch the deprecated ANY_TNALLOC macro itself, which is
        # kept for compatibility reasons.
        #
        # We generally assume that the ToolBOS packages themselves are
        # clean from legacy occurrences.

        if self.details.canonicalPath.find( 'DevelopmentTools/ToolBOS' ) == -1 and \
           self.details.canonicalPath.find( 'Libraries/ToolBOSLib'     ) == -1:

            for fileName in FastScript.findFiles( '.', ext=whitelist ):
                modified = self._replace( fileName, old, old, new, ticket, dryRun )

                if modified:
                    result.append( fileName )

            return result

        else:

            logging.info( 'skipping ToolBOS packages to not patch backward-compatibility layers' )

            return []


    def _patchCIA1147( self, dryRun=False ):
        """
            Remove obsolete BBCM_INFO_CATEGORY.
        """
        if not self.details.isBBCM():
            logging.debug( 'package is not a BBCM' )
            return False

        fileName = 'src/%s_info.c' % self.details.packageName

        if not os.path.exists( fileName ):
            return False


        lines      = FastScript.getFileContent( fileName, splitLines=True )
        modified   = []
        newContent = ''

        for line in lines:
            if line.startswith( 'BBCM_INFO_CATEGORY' ):
                modified.append( fileName )
            else:
                newContent += line

        if modified and dryRun is False:
            FastScript.setFileContent( fileName, newContent )


        return modified


    def _patchCIA1185( self, dryRun=False ):
        """
            Upgrades the ToolBOS SDK dependency.

            So far packages included "DevelopmentTools/ToolBOSCore/2.0", but
            we don't know whether the build-system-related part or the core
            library was actually used from it.

            Updating to ToolBOSLib/3.0 to be on the safe side, which has
            ToolBOSCore/3.0 as a dependency. If we include more than needed,
            people can manually reduce the dependency to ToolBOSCore/3.0
            only; otherwise it typically won't do any harm.
        """
        if self.details.canonicalPath.find( 'DevelopmentTools/ToolBOSCore' ) == -1:
            # other ToolBOS packages like ToolBOSLib etc. should not have a
            # problem if we attempt to patch them (although no patch needed)

            return self._dependencyUpgrade( dryRun, _getReplacementMap_ToolBOS(), 'CIA-1185' )

        else:

            logging.info( 'skipping ToolBOS packages to not patch backward-compatibility layers' )

            return []


    def _patchCIA1191( self, dryRun=False ):
        """
            Replaces the dependency to AllPython 2.7 by Anaconda2 5.2
        """
        return self._dependencyUpgrade( dryRun, _getReplacementMap_Anaconda2(), 'CIA-1191' )


    def _patchCIA1216( self, dryRun=False ):
        """
            Upgrades all Matlab versions in use to the latest release as of today (9.4).
        """
        return self._dependencyUpgrade( dryRun, _getReplacementMap_Matlab94(), 'CIA-1216' )


    def _patchCIA1226( self, dryRun=False ):
        """
            Upgrades all the BPL libraries from 7.1 --> 7.2.
        """
        return self._dependencyUpgrade( dryRun, _getReplacementMap_BPL72(), 'CIA-1226' )


    def _patchCIA1237( self, dryRun=False ):
        """
            Upgrades ToolBOSLib from 3.0 --> 3.1.
        """
        if self.details.packageName == 'ToolBOSLib':
            logging.info( 'skipping ToolBOS packages to not patch backward-compatibility layers' )
            return []

        else:

            return self._dependencyUpgrade( dryRun, _getReplacementMap_ToolBOSLib31(), 'CIA-1237' )


    def _patchCIA1238( self, dryRun=False ):
        """
            Upgrades all the BBDMs from 1.7 --> 1.8.
        """
        return self._dependencyUpgrade( dryRun, _getReplacementMap_BBDM18(), 'CIA-1238' )



    def _patchCIA1239( self, dryRun=False ):
        """
            Upgrades all RTMaps versions in use to the latest release as of today (4.5.6).
        """
        return self._dependencyUpgrade( dryRun, _getReplacementMap_RTMaps456(), 'CIA-1239' )


    def _patchCIA1251( self, dryRun=False ):
        """
            Upgrades components from ToolBOS Middleware from 3.0 --> 3.3.
        """
        if self.details.isComponent():
            return self._dependencyUpgrade( dryRun, _getReplacementMap_Middleware33(), 'CIA-1251' )

        else:
            return False


    def _patchCIA1265( self, dryRun=False ):
        """
            Checks a BBCM package for outdated included header files and
            replaces them with their present-day names.
            Duplicate includes will be deleted.
        """
        if not self.details.isBBCM():
            return False

        # Get a list of all files to check
        files    = FastScript.getFilesInDirRecursive( 'src' )
        modified = []

        # Check every file
        for filePath in files:
            logging.debug( 'processing %s', filePath )

            # Only rewrite the file if a line changed
            rewrite = False
            # Get file content
            fileContent = FastScript.getFileContent( filename=filePath,
                                                     splitLines=True )
            # Check every line
            for line in fileContent:
                item = line.split()

                # Check for include statement
                if line.find( '#include <BBDM' ) != -1:
                    # Replace old package names with the current form
                    match = re.search( r"-A|-CID|-S", item[ 1 ] )
                    if match:
                        fileContent[ fileContent.index( line ) ] = re.sub(
                            r"-A|-CID|-S", "", line )
                        rewrite = True

            includes = []

            # Check the file backwards (iterate over a copy so that
            # removing duplicate includes is safe)
            for line in reversed( list( fileContent ) ):
                item = line.split()

                if line.find( '#include <BBDM' ) != -1:
                    # Check for duplicates and remove existing ones
                    if line in includes:
                        rewrite = True
                        fileContent.remove( line )

                    # add to list of known includes
                    else:
                        includes.append( line )

            # Overwrite file with new content
            try:
                if rewrite:
                    if dryRun:
                        logging.info( '[DRY-RUN] patching %s', filePath )
                    else:
                        logging.info( 'patching %s', filePath )
                        newContent = ''.join( fileContent )
                        FastScript.setFileContent( filePath, newContent )

                    modified.append( filePath )

            except IOError as e:
                logging.error( e )

        return modified


    def _patchCIA1267( self, dryRun=False ):
        """
            Check files for outdated CVS keywords and remove
            these keywords and expanded information from the source code
        """
        files    = FastScript.getFilesInDirRecursive( 'bin' ) | \
                   FastScript.getFilesInDirRecursive( 'examples' ) | \
                   FastScript.getFilesInDirRecursive( 'src' ) | \
                   FastScript.getFilesInDirRecursive( 'test' )

        modified = []

        # CVS keywords
        keywords = frozenset( [ '$Author', '$Date', '$Header', '$Id', '$Log',
                                '$Locker', '$Name', '$RCSfile', '$Revision',
                                '$Source', '$State' ] )

        for filePath in files:
            rewrite     = False

            try:
                fileContent = FastScript.getFileContent( filename=filePath,
                                                         splitLines=True )
            except UnicodeDecodeError:
                # most probably we attempt to read a binary file,
                # e.g. a compiled executable under bin/ or the like
                continue

            # check each line for CVS keywords and remove the line if found
            # (iterate over a copy so that removing lines is safe)
            for line in list( fileContent ):
                if any( key in line for key in keywords ):
                    rewrite = True
                    fileContent.remove( line )

            if rewrite:
                if dryRun:
                    logging.info( '[DRY-RUN] patching %s', filePath )
                else:
                    logging.info( 'patching %s', filePath )
                    newContent = ''.join( fileContent )
                    FastScript.setFileContent( filePath, newContent )

                modified.append( filePath )

        return modified


    def getPatchesAvailable( self ):
        """
            Returns a list of available patches, each item in the list
            belongs to one patch.

            Each item in the list is a tuple of three elements:
               * description
               * function pointer
               * meaningful SVN commit message as specified by the patch
                 author, could be used if patches are applied in batch mode
        """
        result = [ ( '"make unpack" does not work in CMake-only packages (CIA-689)',
                     self._patchCIA681,
                     "pre-configure.sh: replaced 'make unpack' by call to 'UnpackSources.sh'" ),

                   ( 'upgrade SplitterBBCMMaker version to 1.3 (CIA-727)',
                     self._patchCIA727,
                     'upgraded SplitterBBCMMaker version to 1.3 (CIA-727)' ),

                   ( 'upgrade XIF package versions (CIA-765)',
                     self._patchCIA765,
                     'upgraded XIF package versions (CIA-765)' ),

                   # ( 'update MEX building rules in H.Dot packages (CIA-868)',
                   #   self._patchCIA868,
                   #   'updated Matlab wrapper build rules (CIA-868)' ),

                   ( 'pre-configure.sh update (CIA-923)',
                      self._patchCIA923,
                     'updated path of UnpackSources.sh script (CIA-923)' ),

                   ( 'bst_build_libraries() requires CMake 2.8.8 (CIA-955)',
                     self._patchCIA955,
                     'CMakeLists.txt: updated min. CMake version to 2.8.8' ),

                   ( "run 'MakeBBCM.py' instead of 'make BBCM' (CIA-977)",
                     self._patchCIA977,
                     'Makefile: replaced call to RunTemplate.php by MakeBBCM.py (CIA-977)' ),

                   ( 'add dependency to ToolBOSPluginMatlab (CIA-982)',
                     self._patchCIA982,
                     'pkgInfo.py: added build-dependencies (CIA-982)' ),

                   ( 'repository clean-up (CIA-988)',
                     self._patchCIA988,
                     'removed legacy / unused PHP files (CIA-988)' ),

                   ( 'remove legacy ANY_DEF_*_TAG macros (CIA-989)',
                     self._patchCIA989,
                     'removed legacy ANY_DEF_*_TAG and related macros (CIA-989)' ),

                   ( 'update valid-flags to detect memory problems (CIA-1094)',
                     self._patchCIA1094,
                     'replaced boilerplate valid-flags to better detect memory problems (CIA-1094)' ),

                   ( 'replace $HRI_GLOBAL_ROOT by $SIT (CIA-1112)',
                     self._patchCIA1112,
                     'replaced $HRI_GLOBAL_ROOT by $SIT (CIA-1112)' ),

                   ( 'replace ANY_TNALLOC by ANY_NTALLOC (CIA-1143)',
                     self._patchCIA1143,
                     'replaced ANY_TNALLOC by ANY_NTALLOC (CIA-1143)' ),

                   ( 'remove obsolete BBCM_INFO_CATEGORY (CIA-1147)',
                     self._patchCIA1147,
                     'removed obsolete BBCM_INFO_CATEGORY (CIA-1147)' ),

                   ( 'upgrading ToolBOS dependency (CIA-1185)',
                     self._patchCIA1185,
                     'upgraded ToolBOS dependency 2.0 --> 3.0 (CIA-1185)' ),

                   ( 'replacing dependency to AllPython by Anaconda (CIA-1191)',
                     self._patchCIA1191,
                     'replaced dependency to AllPython by Anaconda (CIA-1191)' ),

                   ( 'upgrading Matlab dependency (CIA-1216)',
                     self._patchCIA1216,
                     'upgraded Matlab dependency (CIA-1216)' ),

                   ( 'upgrading BPL dependency (CIA-1226)',
                     self._patchCIA1226,
                     'upgraded BPL dependency 7.1 --> 7.2 (CIA-1226)' ),

                   ( 'upgrading ToolBOSLib dependency (CIA-1237)',
                     self._patchCIA1237,
                     'upgraded ToolBOSLib dependency 3.0 --> 3.1 (CIA-1237)' ),

                   ( 'upgrading BBDM dependency (CIA-1238)',
                     self._patchCIA1238,
                     'upgraded BBDM dependency 1.7 --> 1.8 (CIA-1238)' ),

                   ( 'upgrading RTMaps dependency (CIA-1239)',
                     self._patchCIA1239,
                     'upgraded RTMaps dependency to 4.5.6 (CIA-1239)' ),

                   ( 'replacing outdated BBDM filenames (CIA-1265)',
                     self._patchCIA1265,
                     'replaced BBDM headerfile names (CIA-1265)' ),

                   ( 'removing outdated CVS keywords (CIA-1267)',
                     self._patchCIA1267,
                     'removed outdated CVS keywords and related code (CIA-1267)' )]

        return result


    def run( self, dryRun=False ):
        """
            Performs all patching routines. If dryRun=True, all operations
            are simulated (your project won't be altered).
        """
        available = self.getPatchesAvailable()
        descrLen  = 0
        applied   = []

        # determine the longest description to compute number of dashes
        for patch in available:
            descrLen = max( len(patch[0]), descrLen )
            descrLen = max( len(patch[2]), descrLen )


        logging.info( descrLen * '-' )

        for patch in available:
            logging.info( 'EXECUTING PATCH: %s' % patch[0] )
            result = patch[1]( dryRun )

            if result:
                Any.requireIsList( result )
                fileList = FastScript.reduceList( result )
                fileList.sort()

                for item in fileList:
                    logging.info( 'patching %s' % item )

                logging.info( patch[2] )

                applied.append( patch )
            else:
                logging.info( 'no need to patch' )

            logging.info( descrLen * '-' )

        logging.info( '' )

        return applied


    def runPatches_dependencyUpgrade( self ):
        """
            Executes all patches related to dependency upgrades, e.g.
            modifying the CMakeLists.txt / packageVar.cmake / ... files.
        """
        self._patchCIA765()    # upgrade XIF


    def _replace( self, fileName, check, old, new, ticketID, dryRun, count=None ):
        """
            Checks if <fileName> contains <check>, and if so it replaces
            the string <old> by <new>.

            If dryRun=True the check for patch necessity will be executed
            normally, but then no files will be altered.

            Returns a boolean indicating whether the file was affected.
            Returns 'None' if the specified file was not found, thus the
            patch might not be applicable.

            If the argument 'count' is given, only the first n occurrences
            will be replaced.
        """
        try:
            content = FastScript.getFileContent( fileName )

        except ( IOError, UnicodeDecodeError ):
            # UnicodeDecodeError may happen when attempting to read a binary
            # file (e.g. executable), skip those as they shall not be patched

            return False

        logging.debug( '%s: searching for "%s"', fileName, check.strip() )
        needed = content.find( check ) != -1
        logging.debug( 'patch "%s" --> "%s" needed: %s' % \
                       ( old.strip(), new.strip(), str(needed) ) )

        if needed and not dryRun:

            if count is None:
                content = content.replace( old, new )
            else:
                content = content.replace( old, new, count )


            FastScript.setFileContent( fileName, content )

        return needed


    def _dependencyUpgrade( self, dryRun, replacementMap, ticketID ):
        """
            Patches the CMakeLists.txt / packageVar.cmake / ...
            files (if necessary) to include different packages.
        """
        Any.requireIsBool( dryRun )
        Any.requireIsDict( replacementMap )
        Any.requireIsTextNonEmpty( ticketID )

        status1  = False
        status2  = False
        status3  = False
        modified = []

        for old, new in replacementMap.items():
            status1 |= self._replace( 'CMakeLists.txt',   old, old, new, ticketID, dryRun )
            status2 |= self._replace( 'packageVar.cmake', old, old, new, ticketID, dryRun )
            status3 |= self._replace( 'pkgInfo.py',       old, old, new, ticketID, dryRun )

        if status1:
            modified.append( 'CMakeLists.txt' )

        if status2:
            modified.append( 'packageVar.cmake' )

        if status3:
            modified.append( 'pkgInfo.py' )

        return modified
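To make _dependencyUpgrade() above more tangible: the replacement maps returned by the _getReplacementMap_*() helpers are consumed as plain old-string/new-string pairs. A sketch for the BPL 7.1 --> 7.2 upgrade could therefore look like the following; the exact package path is an assumption, only the 7.1 --> 7.2 intent is taken from the docstrings above:

def _getReplacementMap_BPL72_sketch():
    # hypothetical sketch, not the original ToolBOS implementation
    return { 'Libraries/BPL/7.1': 'Libraries/BPL/7.2' }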
class CheckRoutine(object):
    def __init__(self, projectRoot=None, details=None):
        """
            By default scans the package within the current working directory.
            Alternatively 'projectRoot' may be specified to point to any
            other top-level directory of a source package.

            If a PackageDetector instance is already at hand it can be
            provided here to speed things up. In that case its
            retrieveMakefileInfo() and retrieveVCSInfo() must already
            have been called.

            You need to call setup() and run() to use it.

            You may optionally specify a list of rule IDs to run,
            and/or a set of files to consider.
            The default is to run all checkers on all files of the package.
        """
        self.details = None

        self.includeDirs = set()
        self.includeFiles = set()

        self.excludeDirs = {
            'build', 'external', 'klocwork', 'precompiled', 'sources', '.git',
            '.svn'
        }
        self.excludeFiles = set()

        self.includeExts = {
            '.c', '.h', '.cpp', '.hpp', '.inc', '.py', '.java', '.m'
        }

        self.sqLevelToRun = None  # level to use for this SQ check run

        self.useOptFlags = True  # disabled when invoking setRules()

        self.files = set()  # final list of files to check

        self.rules = {}  # { ID: obj } of all rules (None if n/a)
        self.ruleIDs = set()  # IDs of all SQ rules, not ordered
        self.rulesOrdered = []  # IDs of all SQ rules, sorted
        self.rulesImplemented = set()  # IDs of all implemented rules
        self.rulesInLevel = set()  # IDs of rules required by selected SQ level
        self.rulesRemoved = set()  # IDs of outdated rules
        self.rulesToRun = []  # IDs of rules to run this time, sorted

        self.results = {}  # result data, filled by runParticular()

        self._summaryEnabled = True  # True/False, show stats after run

        self._populatePackage(projectRoot, details)
        self._populateFiles()
        self._populateRules()

    def excludeDir(self, dirPath):
        Any.requireIsTextNonEmpty(dirPath)

        if os.path.exists(dirPath):
            logging.debug('ignoring 3rd-party content in %s', dirPath)

            origFiles = copy.copy(self.files)

            for filePath in origFiles:
                if filePath.startswith(dirPath):
                    self.files.remove(filePath)

    def excludeFile(self, filePath):
        absPath = os.path.join(self.details.topLevelDir, filePath)

        try:
            self.files.remove(absPath)
        except KeyError:
            pass

    def excludeRule(self, ruleID):
        try:
            self.rulesToRun.remove(ruleID)
        except KeyError:
            pass

    def includeDir(self, dirPath):
        Any.requireIsTextNonEmpty(dirPath)

        # FastScript.getFilesInDirRecursive() returns:
        #    * absolute file paths if an absolute dir. path is provided
        #    * relative file paths if a relative dir. path is provided
        #
        # shorten absolute path names to relative ones for shorter outputs:
        searchPath = os.path.relpath(dirPath)

        for filePath in FastScript.getFilesInDirRecursive(searchPath):

            # only consider whitelisted extensions, f.i. do not analyze
            # binaries, bytecode files, PDFs etc.
            fileExt = os.path.splitext(filePath)[-1]

            if fileExt in self.includeExts:
                if not os.path.islink(filePath):
                    self.includeFile(filePath)

    def includeFile(self, filePath):
        Any.requireIsTextNonEmpty(filePath)

        self.files.add(filePath)

    def includeRule(self, ruleID):
        Any.requireIsTextNonEmpty(ruleID)
        Any.requireIsIn(ruleID, self.ruleIDs)

        if ruleID not in self.rulesToRun:
            self.rulesToRun.append(ruleID)

    def overallResult(self):
        for result in self.results.values():

            if result[0] is FAILED:
                return False

        return True

    def run(self):
        """
            Executes the previously configured checks, no matter if they
            are actually needed in the given quality level or opted-out by
            the maintainer.

            See also
              * setRules()
              * setUseOptFlags()
        """
        for ruleID in self.rulesToRun:

            if ruleID in self.rulesRemoved:
                logging.info('')
                logging.info('%s: Rule has been removed', ruleID)

            elif ruleID in self.rulesImplemented:
                logging.info('')
                self._runCheck(ruleID)
            else:
                logging.info('')
                logging.info('%s: Not implemented', ruleID)

        logging.info('')

        if self._summaryEnabled:
            self._showSummary()

    def setDirs(self, dirs):
        """
            Check the given set of directories, regardless of other settings.

            Note that filename-extensions apply.
        """
        Any.requireIsSet(dirs)

        self.files = set()

        for path in dirs:
            self.includeDir(path)

    def setFiles(self, files):
        """
            Check the given set of files, regardless of other settings.
        """
        Any.requireIsSet(files)

        self.files = files

    def setLevel(self, levelName):
        """
            Performs the check using a particular pre-defined quality set.

            This overrides the (optional) setting 'sqLevel' in the
            pkgInfo.py of the package to test.
        """
        Any.requireIsTextNonEmpty(levelName)
        Any.requireIsIn(levelName, sqLevelNames)

        self.sqLevelToRun = levelName

    def setRules(self, ruleIDs):
        """
            Run only the given list of rules, instead of all.

            This overrides the (optional) settings 'sqOptIn' and/or
            'sqOptOut' in the pkgInfo.py of the package to test.
        """
        Any.requireIsListNonEmpty(ruleIDs)

        for ruleID in ruleIDs:
            Any.requireIsIn(ruleID, self.ruleIDs)

        self.rulesToRun = ruleIDs
        self.useOptFlags = False

    def setup(self):
        """
            Considers the opt-in/out files/rules in the pkgInfo.py (if any).
        """
        self._setupSqLevel()
        self._setupOptIn()
        self._setupOptOut()

    def showSummary(self, state):
        """
            Force showing (or not) a summary at the end of run().
        """
        Any.requireIsBool(state)

        self._summaryEnabled = state

    def _computeSuccessRate(self, ruleID):
        """
            Computes the success rate (in percent) for a given rule,
            based on the values returned by the corresponding checker.
        """
        Any.requireIsTextNonEmpty(ruleID)

        (status, passed, failed, shortText) = self.results[ruleID]

        # in case of 'not required' do not display any arbitrary number
        # like 0% or 100% (does not make sense)

        if status in (OK, FAILED):

            total = passed + failed

            try:
                percent = float(passed) / float(total) * 100
            except ZeroDivisionError:
                # Division by zero can only happen in case the total number
                # is zero, f.i. the check did not apply to any file.
                # Set percentage to 100% in this case == success.
                percent = 100

            return '%3d%%' % percent

        else:

            return ''
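
    # Worked example (values are illustrative): a result tuple of
    # ( FAILED, 7, 3, 'some short text' ) means 7 passed and 3 failed,
    # so the success rate is 7 / (7 + 3) * 100 = 70, rendered as ' 70%'.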

    def _populateFiles(self):
        """
            Performs an initial scan for files in the given project.

            Later on the user may customize this list using the
            corresponding functions.
        """
        self.includeDir(self.details.topLevelDir)

    def _populatePackage(self, projectRoot, details):
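        """
            Determines the package details, either from the provided
            PackageDetector instance or by analyzing the package at
            'projectRoot'.
        """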
        if details:
            Any.requireIsInstance(details, PackageDetector)
            self.details = details

        else:
            BuildSystemTools.requireTopLevelDir(projectRoot)

            logging.info('analyzing package... (this may take some time)')
            self.details = PackageDetector(projectRoot)
            self.details.retrieveMakefileInfo()
            self.details.retrieveVCSInfo()

    def _populateRules(self, forceRules=None):
        """
            Discovers available / not implemented / opted-in / opted-out
            checkers.

            'forceRules' is supposed to be an ordered list of rule IDs
            to execute this time. If 'None', all implemented rules will
            be checked.
        """
        checkersAvailable = Rules.getRules()
        Any.requireIsListNonEmpty(checkersAvailable)

        for elem in checkersAvailable:
            Any.requireIsTuple(elem)

            (ruleID, rule) = elem
            Any.requireIsTextNonEmpty(ruleID)
            Any.requireIsInstance(rule, Rules.AbstractRule)

            self.ruleIDs.add(ruleID)

            self.rules[ruleID] = rule

            self.rulesOrdered.append(ruleID)

            if hasattr(rule, 'run'):
                self.rulesImplemented.add(ruleID)

                # will get overwritten below if 'forceRules' provided
                self.rulesToRun.append(ruleID)

            elif rule.removed:
                self.rulesRemoved.add(ruleID)

            # else: rule not implemented, yet

        if forceRules is not None:
            Any.requireIsListNonEmpty(forceRules)

            for ruleID in forceRules:
                Any.requireIsTextNonEmpty(ruleID)
                Any.requireIsIn(ruleID, self.ruleIDs)

            self.rulesToRun = forceRules

    def _printEnabled(self):
        """
            Prints the final list of files and rules, for debugging purposes,
            so one can see what is going to be executed / checked.
        """
        logging.debug('')

        logging.debug('checking files:')

        for filePath in sorted(self.files):
            logging.debug(filePath)

        logging.debug('')
        logging.debug('checking rules:')
        logging.debug(' '.join(self.rulesToRun))
        logging.debug('')

    def _runCheck(self, ruleID):
        """
            Executes the checker for the specified rule ID, and stores the
            result in self.results[ ruleID ].

            Raises a KeyError upon invalid ID, and AttributeError if not
            implemented.
        """
        Any.requireIsTextNonEmpty(ruleID)

        logging.info('checking rule: %s', ruleID)

        if self.useOptFlags and ruleID in self.details.sqOptOutRules:
            result = (DISABLED, None, None, 'explicitly opted-out in pkgInfo.py')

        else:

            if self.useOptFlags and ruleID in self.details.sqOptInRules:
                logging.info('explicitly enabled in pkgInfo.py')

            result = self._runCheck_worker(ruleID)

        status = result[0]
        msg = '(' + str(result[3]) + ')' if result[3] else ''

        logging.info('checking rule: %s → %s %s', ruleID, status, msg)

        try:
            comment = self.details.sqComments[ruleID]
            logging.info('comment by maintainer: "%s"', comment)
        except KeyError:
            pass

        self.results[ruleID] = result

    def _runCheck_worker(self, ruleID):
        Any.requireIsTextNonEmpty(ruleID)

        rule = self.rules[ruleID]

        if ruleID in self.rulesImplemented:
            result = rule.run(self.details, self.files)
        else:
            result = (NOT_IMPLEMENTED, None, None, None)

        Any.requireIsTuple(result)

        return result
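
    # Note: each implemented checker's run() is expected to return a 4-tuple
    # of the form ( status, passed, failed, shortText ), e.g.
    # ( OK, 12, 0, None ); the concrete values here are illustrative only.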

    def _setupSqLevel(self):
        # force the run under a certain SQ level if provided, otherwise fall
        # back to the pkgInfo.py setting

        if not self.sqLevelToRun:
            self.sqLevelToRun = self.details.sqLevel

        msg = '"%s": No such quality level (allowed: %s)' % \
              ( self.sqLevelToRun, ', '.join( sqLevelNames ) )
        Any.requireMsg(self.sqLevelToRun in sqLevelNames, msg)

        for ruleID, rule in self.rules.items():
            if ruleID not in self.rulesImplemented:
                continue

            Any.requireIsInstance(rule, Rules.AbstractRule)
            Any.requireIsInstance(rule.sqLevel, frozenset)

            if self.sqLevelToRun in rule.sqLevel:
                self.rulesInLevel.add(ruleID)

            else:
                # filter out rules not needed at the level at hand
                # (don't filter them out if we force-run particular rules)
                if self.useOptFlags:

                    logging.debug('%6s: no need to run at level=%s', ruleID,
                                  self.sqLevelToRun)
                    self.rulesToRun.remove(ruleID)

    def _setupOptIn(self):
        Any.requireIsIterable(self.details.sqOptInRules)

        for ruleID in self.details.sqOptInRules:
            logging.debug('%6s: enabled (opt-in via pkgInfo.py)', ruleID)
            self.includeRule(ruleID)

    def _setupOptOut(self):
        Any.requireIsIterable(self.details.sqOptOutRules)

        for ruleID in self.details.sqOptOutRules:
            logging.debug('%6s: disabled (opt-out via pkgInfo.py)', ruleID)

            # Don't do that! It would hide a rule that is supposed to get
            # executed from the normal progress log and report:
            #
            # self.excludeRule( ruleID )

    def _showSummary(self):
        """
            Shows a summary of the execution results.
        """
        self._showSummaryHeadline()
        self._showSummaryTable()
        self._showSummaryComments()

    def _showSummaryHeadline(self):
        Any.requireIsTextNonEmpty(self.details.canonicalPath)
        Any.requireIsTextNonEmpty(self.details.sqLevel)

        logging.info('')
        logging.info('results for %s (level=%s):', self.details.canonicalPath,
                     self.sqLevelToRun)
        logging.info('')

    def _showSummaryTable(self):
        Any.requireIsDictNonEmpty(self.results)
        Any.requireIsListNonEmpty(self.rulesOrdered)

        for ruleID in self.rulesOrdered:
            if ruleID not in self.rulesImplemented:
                continue

            if ruleID not in self.rulesToRun:
                continue

            (status, passed, failed, shortText) = self.results[ruleID]

            successRate = self._computeSuccessRate(ruleID)

            logging.info('%8s | %14s | %4s | %s', ruleID.ljust(6),
                         status.ljust(14), successRate, shortText)

        logging.info('')

    def _showSummaryComments(self):
        if self.details.sqComments:
            logging.info('comments by maintainer:')
            logging.info('')

            for ruleID in self.rulesOrdered:
                if ruleID in self.details.sqComments:
                    comment = self.details.sqComments[ruleID]

                    logging.info('%8s: "%s"', ruleID, comment)
                    logging.info('')