Example #1
 def join(self):
     if self._queue.empty():
         logger.trace(
             "output_processor: Stream _queue.empty() - flush with None [{}]"
             .format(self._name))
         self._queue.put(None)
     self._queue.join()
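
The join() above relies on a sentinel-flush pattern: if nothing was ever queued, a None is pushed so the consumer's task_done() accounting completes and queue.join() cannot block forever. A minimal standalone sketch of that pattern, using only the standard queue module (the Stream class and logger above belong to the original codebase):

import queue
import threading

q = queue.Queue()

def consume():
    while True:
        item = q.get()
        q.task_done()      # every get() is balanced by a task_done()
        if item is None:   # None is the "flush" sentinel
            break

threading.Thread( target=consume, daemon=True ).start()

if q.empty():
    q.put( None )          # flush with None, as in join() above
q.join()                   # returns once the sentinel is processed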
Example #2
    def __call__( self, target, source, env ):
        destination = self._destination
        if not destination:
            destination = env['abs_final_dir']
        else:
            destination = self._destination + destination_subdir( env )

        master_index = env.File( os.path.join( self._destination, "test-report-index.json" ) )
        master_report = env.File( os.path.join( self._destination, "test-report-index.json" ) )

        env.Clean( source, master_index )
        env.Clean( source, master_report )

        ReportIndexBuilder.register_report_folders( final_dir=env['abs_final_dir'], destination_dir=self._destination )

        for html_report, json_report in zip(*[iter(source)]*2):
            target.append( os.path.join( destination, os.path.split( str(html_report) )[1] ) )
            json_report_target = env.File( os.path.join( destination, os.path.split( str(json_report) )[1] ) )
            target.append( json_report_target )
            ReportIndexBuilder.update_index( json_report_target, os.path.split(json_report_target.abspath)[0] )

        logger.trace( "sources = [{}]".format( colour_items( [str(s) for s in source] ) ) )
        logger.trace( "targets = [{}]".format( colour_items( [str(t) for t in target] ) ) )

        env.Depends( master_report, target )
        env.Depends( master_index, target )

        return target, source
Example #3
    def __call__( self, node ):
        file_path = str(node)

        for excluded in self._excluded_paths:
            if file_path.startswith( excluded ):
                return

        path, ext = os.path.splitext( file_path )

        if ext and ext in self._ignored_types:
            return

        for allowed in self._allowed_paths:
            prefix = os.path.commonprefix( [ os.path.abspath( file_path ), allowed ] )
            logger.trace( "str(file)=[{}], file.path=[{}], allowed=[{}], prefix=[{}]".format(
                    as_notice( str(node) ),
                    as_notice( node.path ),
                    as_notice( str(allowed) ),
                    as_notice( str(prefix) )
            ) )
            if prefix != allowed:
                return

        logger.trace( "str(file)=[{}], file.path=[{}], allowed=[{}], prefix=[{}]".format(
                as_notice( str(node) ),
                as_notice( node.path ),
                as_notice( str(allowed) ),
                as_notice( str(prefix) )
        ) )

        file_path = os.path.relpath( os.path.abspath( file_path ), self._base_path )
        self._files.add( file_path )
        return
Example #4
    def __init__( self, env, stage_dir, libraries, add_dependents, linktype, boost ):
        self._env = env

        sconstruct_id = env['sconstruct_path']
        global _prebuilt_boost_libraries
        if sconstruct_id not in _prebuilt_boost_libraries['emitter']:
            _prebuilt_boost_libraries['emitter'][sconstruct_id] = {}

        logger.trace( "Current Boost build [{}] has the following build variants [{}]".format( as_info(sconstruct_id), colour_items(_prebuilt_boost_libraries['emitter'][sconstruct_id].keys()) ) )

        self._stage_dir    = stage_dir

        logger.debug( "Requested libraries [{}]".format( colour_items( libraries ) ) )

        self._libraries    = _lazy_update_library_list( env, True, libraries, _prebuilt_boost_libraries['emitter'][sconstruct_id], add_dependents, linktype, boost, self._stage_dir )

        logger.debug( "Required libraries [{}]".format( colour_items( self._libraries ) ) )

        self._location     = boost.local()
        self._boost        = boost
        self._threading    = True

        self._linktype     = linktype
        self._variant      = variant_name( self._env['variant'].name() )
        self._toolchain    = env['toolchain']
Example #5
def _lazy_update_library_list(env, emitting, libraries, prebuilt_libraries,
                              add_dependents, linktype, boost, stage_dir):
    def build_with_library_name(library):
        return library == 'log_setup' and 'log' or library

    if add_dependents:
        if not emitting:
            libraries = set(
                build_with_library_name(l)
                for l in add_dependent_libraries(boost, linktype, libraries))
        else:
            libraries = add_dependent_libraries(boost, linktype, libraries)

    if not stage_dir in prebuilt_libraries:
        logger.trace("Lazy update libraries list for [{}] to [{}]".format(
            as_info(stage_dir), colour_items(str(l) for l in libraries)))
        prebuilt_libraries[stage_dir] = set(libraries)
    else:
        logger.trace(
            "Lazy read libraries list for [{}]: libraries are [{}]".format(
                as_info(stage_dir), colour_items(str(l) for l in libraries)))
        libraries = [
            l for l in libraries if l not in prebuilt_libraries[stage_dir]
        ]
        prebuilt_libraries[stage_dir].update(libraries)

    return libraries
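
Stripped of logging and the Boost dependency expansion, the caching behaviour of _lazy_update_library_list is easy to demonstrate: the first call for a given stage_dir records the full library list, and later calls return only the libraries that have not been built yet. A simplified, dependency-free sketch:

def lazy_update( libraries, prebuilt, stage_dir ):
    if stage_dir not in prebuilt:
        prebuilt[stage_dir] = set( libraries )
    else:
        libraries = [ l for l in libraries if l not in prebuilt[stage_dir] ]
        prebuilt[stage_dir].update( libraries )
    return libraries

prebuilt = {}
print( lazy_update( ['system', 'thread'], prebuilt, 'stage' ) )  # ['system', 'thread']
print( lazy_update( ['system', 'chrono'], prebuilt, 'stage' ) )  # ['chrono']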
Example #6
def _lazy_update_library_list( env, emitting, libraries, prebuilt_libraries, add_dependents, linktype, boost, stage_dir ):

    def build_with_library_name( library ):
        if library == 'log_setup':
            return 'log'
        elif library in { 'prg_exec_monitor', 'test_exec_monitor', 'unit_test_framework' }:
            return 'test'
        else:
            return library

    if add_dependents:
        if not emitting:
            libraries = set( build_with_library_name(l) for l in add_dependent_libraries( boost, linktype, libraries ) )
        else:
            libraries = add_dependent_libraries( boost, linktype, libraries )

    if not stage_dir in prebuilt_libraries:
        logger.trace( "Lazy update libraries list for [{}] to [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        prebuilt_libraries[ stage_dir ] = set( libraries )
    else:
        logger.trace( "Lazy read libraries list for [{}]: libraries are [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        libraries = [ l for l in libraries if l not in prebuilt_libraries[ stage_dir ] ]
        prebuilt_libraries[ stage_dir ].update( libraries )

    return libraries
Example #7
    def __init__( self, env, stage_dir, libraries, add_dependents, linktype, boost, verbose_build, verbose_config ):

        self._env = env

        sconstruct_id = env['sconstruct_path']
        global _prebuilt_boost_libraries
        if sconstruct_id not in _prebuilt_boost_libraries['action']:
            _prebuilt_boost_libraries['action'][sconstruct_id] = {}

        logger.trace( "Current Boost build [{}] has the following build variants [{}]".format( as_info(sconstruct_id), colour_items(_prebuilt_boost_libraries['action'][sconstruct_id].keys()) ) )

        logger.debug( "Requested libraries [{}]".format( colour_items( libraries ) ) )

        self._linktype       = linktype
        self._variant        = variant_name( self._env['variant'].name() )
        self._target_arch    = env['target_arch']
        self._toolchain      = env['toolchain']
        self._stage_dir      = stage_dir

        self._libraries = _lazy_update_library_list( env, False, libraries, _prebuilt_boost_libraries['action'][sconstruct_id], add_dependents, linktype, boost, self._stage_dir )

        logger.debug( "Required libraries [{}]".format( colour_items( self._libraries ) ) )

        self._location       = boost.local()
        self._verbose_build  = verbose_build
        self._verbose_config = verbose_config
        self._job_count      = env['job_count']
        self._parallel       = env['parallel']
        self._threading      = True
Example #8
    def __init__(self, env, stage_dir, libraries, add_dependents, linktype,
                 boost, verbose_build, verbose_config):

        self._env = env

        logger.trace("Requested libraries [{}]".format(
            colour_items(libraries)))

        self._linktype = linktype
        self._variant = variant_name(self._env['variant'].name())
        self._target_arch = env['target_arch']
        self._toolchain = env['toolchain']
        self._stage_dir = stage_dir

        self._libraries = _lazy_update_library_list(env, False, libraries,
                                                    self.prebuilt_libraries,
                                                    add_dependents, linktype,
                                                    boost, self._stage_dir)

        logger.trace("Required libraries [{}]".format(
            colour_items(self._libraries)))

        self._location = boost.local()
        self._verbose_build = verbose_build
        self._verbose_config = verbose_config
        self._job_count = env['job_count']
        self._parallel = env['parallel']
Example #9
    def get_branch(cls, path):
        branch = None
        remote = None

        # In case we have a detached head we use this
        result = as_str(
            cls.execute_command(
                "{git} show -s --pretty=\%d HEAD".format(git=cls.binary()),
                path))
        match = re.search(r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result)
        if match:
            branches = [b.strip() for b in match.group("branches").split(',')]
            logger.trace("Branches (using show) for [{}] are [{}]".format(
                as_notice(path), colour_items(branches)))
            if len(branches) == 1:
                # If this returns a tag: tag_name replace the ": " with "/" and then extract the tag_name
                # otherwise this will simply extract the branch_name as expected
                if not branches[0].startswith('tag:'):
                    remote = branches[0]
                branch = branches[0].replace(': ', '/').split('/')[1]
            else:
                remote = branches[-2]
                branch = remote.split('/')[1]
            logger.trace("Branch (using show) for [{}] is [{}]".format(
                as_notice(path), as_info(branch)))
        else:
            logger.warn("No branch found from [{}]".format(result))

        return branch, remote
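
The branch extraction hinges on one regex over the decoration printed by "git show -s --pretty=%d HEAD". A sketch of how it behaves, with illustrative sample strings rather than output from any particular repository:

import re

pattern = r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]'

detached = " (HEAD, origin/master, master)"
branches = [ b.strip() for b in re.search( pattern, detached ).group( "branches" ).split( ',' ) ]
print( branches )                       # ['origin/master', 'master']
print( branches[-2].split( '/' )[1] )   # 'master', taken from the remote ref

tagged = " (HEAD, tag: v1.2.0)"
single = re.search( pattern, tagged ).group( "branches" )
print( single.replace( ': ', '/' ).split( '/' )[1] )  # 'v1.2.0', via the ': ' -> '/' trick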
Example #10
    def __call__( self, node ):
        file_path = str(node)

        for excluded in self._excluded_paths:
            if file_path.startswith( excluded ):
                return

        path, ext = os.path.splitext( file_path )

        if ext and ext in self._ignored_types:
            return

        for allowed in self._allowed_paths:
            prefix = os.path.commonprefix( [ os.path.abspath( file_path ), allowed ] )
            logger.trace( "str(file)=[{}], file.path=[{}], allowed=[{}], prefix=[{}]".format(
                    as_notice( str(node) ),
                    as_notice( node.path ),
                    as_notice( str(allowed) ),
                    as_notice( str(prefix) )
            ) )
            if prefix != allowed:
                return

        logger.trace( "str(file)=[{}], file.path=[{}], allowed=[{}], prefix=[{}]".format(
                as_notice( str(node) ),
                as_notice( node.path ),
                as_notice( str(allowed) ),
                as_notice( str(prefix) )
        ) )

        file_path = os.path.relpath( os.path.abspath( file_path ), self._base_path )
        self._files.add( file_path )
        return
Example #11
    def GenerateHtmlTestReport( self, target, source, env ):

        self._base_uri = ""
        if self._auto_link_tests:
            self._base_uri = initialise_test_linking( env, link_style=self._link_style )

        # Each source will result in one or more targets so we need to slice the targets to pick up
        # the gcov target (the first one) before we perform the zip iteration
        for s, t in zip( source, itertools.islice( target, 0, None, len(target)//len(source) ) ):
            test_suites = {}

            logger.trace( "source = [{}]".format( as_info(str(s)) ) )
            logger.trace( "target = [{}]".format( as_info(str(t)) ) )

            test_cases = self._read( s.abspath )
            for test_case in test_cases:

                if not 'assertions_count' in test_case:
                    if 'assertions' in test_case:
                        test_case['assertions_count']   = test_case['assertions']
                        test_case['assertions_passed']  = test_case['passed']
                        test_case['assertions_failed']  = test_case['failed']
                        test_case['assertions_aborted'] = test_case['aborted']
                    else:
                        test_case['assertions_count']   = 0
                        test_case['assertions_passed']  = 0
                        test_case['assertions_failed']  = 0
                        test_case['assertions_aborted'] = 0

                self._add_to_test_suites( test_suites, test_case )
            self._write( str(t), env, test_suites, sort_test_cases=self._sort_test_cases )
        return None
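
The slicing idiom in the loop above pairs each source with the first of its targets when every source produced the same number of targets. A small sketch with plain lists standing in for SCons nodes:

import itertools

source = [ 'a.json', 'b.json' ]
target = [ 'a.html', 'a.css', 'b.html', 'b.css' ]   # two targets per source

step = len(target) // len(source)
for s, t in zip( source, itertools.islice( target, 0, None, step ) ):
    print( s, '->', t )   # a.json -> a.html, then b.json -> b.html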
Example #12
    def build_library_from_source( self, env, sources=None, library_name=None, linktype=None ):

        from SCons.Script import Flatten

        if not self._source_path and not sources:
            logger.warn( "Attempting to build library when source path is None" )
            return None

        if not library_name:
            library_name = self._name

        if not linktype:
            linktype = self._linktype

        variant_key = env['tool_variant_dir']

        prebuilt_objects   = self.lazy_create_node( variant_key, self._prebuilt_objects )
        prebuilt_libraries = self.lazy_create_node( variant_key, self._prebuilt_libraries )

        local_dir = self._location.local()
        local_folder = self._location.local_folder()

        build_dir = os.path.abspath( os.path.join( env['abs_build_root'], local_folder, env['tool_variant_working_dir'] ) )
        final_dir = os.path.abspath( os.path.normpath( os.path.join( build_dir, env['final_dir'] ) ) )

        logger.debug( "build_dir for [{}] = [{}]".format( as_info(self._name), build_dir ) )
        logger.debug( "final_dir for [{}] = [{}]".format( as_info(self._name), final_dir ) )

        obj_suffix = env['OBJSUFFIX']
        obj_builder = env.StaticObject
        lib_builder = env.BuildStaticLib

        if linktype == "shared":
            obj_suffix = env['SHOBJSUFFIX']
            obj_builder = env.SharedObject
            lib_builder = env.BuildSharedLib

        if not sources:
            sources = env.RecursiveGlob( "*.cpp", start=self._source_path, exclude_dirs=[ env['build_dir'] ] )
            sources.extend( env.RecursiveGlob( "*.cc", start=self._source_path, exclude_dirs=[ env['build_dir'] ] ) )
            sources.extend( env.RecursiveGlob( "*.c", start=self._source_path, exclude_dirs=[ env['build_dir'] ] ) )

        objects = []
        for source in Flatten( [sources] ):
            rel_path = os.path.relpath( str(source), local_dir )
            rel_obj_path = os.path.splitext( rel_path )[0] + obj_suffix
            obj_path = os.path.join( build_dir, rel_obj_path )
            if not rel_obj_path in prebuilt_objects:
                prebuilt_objects[rel_obj_path] = obj_builder( obj_path, source )
            objects.append( prebuilt_objects[rel_obj_path] )

        if not linktype in prebuilt_libraries:
            library = lib_builder( library_name, objects, final_dir = final_dir )
            if linktype == "shared":
                library = env.Install( env['abs_final_dir'], library )
            prebuilt_libraries[linktype] = library
        else:
            logger.trace( "using existing library = [{}]".format( str(prebuilt_libraries[linktype]) ) )

        return prebuilt_libraries[linktype]
Example #13
 def _add_to_test_suites( cls, test_suites, test_case ):
     logger.trace( "test_case = [{}]".format( as_notice( str(test_case) ) ) )
     suite = test_case['suite']
     if not suite in test_suites:
         test_suites[suite] = {}
         cls._initialise_test_suite( suite, test_suites[suite] )
     test_suite = test_suites[suite]
     test_suite['test_cases'].append( test_case )
     cls._update_summary_stats( test_suite, test_case )
Example #14
 def _add_to_test_suites(cls, test_suites, test_case):
     logger.trace("test_case = [{}]".format(as_notice(str(test_case))))
     suite = test_case['suite']
     if not suite in test_suites:
         test_suites[suite] = {}
         cls._initialise_test_suite(suite, test_suites[suite])
     test_suite = test_suites[suite]
     test_suite['test_cases'].append(test_case)
     cls._update_summary_stats(test_suite, test_case)
Example #15
def add_dependent_libraries(boost, linktype, libraries):
    version = boost.numeric_version()
    patched_test = boost._patched_test
    required_libraries = set(libraries)

    logger.trace("Required Library Set = [{}]".format(
        colour_items([l for l in required_libraries])))

    for library in libraries:
        if library in boost_libraries_with_no_dependencies():
            continue
        elif library == 'chrono':
            required_libraries.update(['system'])
        elif library == 'coroutine':
            required_libraries.update(['context', 'system'])
            if version > 1.55:
                required_libraries.update(['thread'])
            if linktype == 'shared':
                required_libraries.update(['chrono'])
        elif library == 'filesystem':
            required_libraries.update(['system'])
        elif library == 'graph':
            required_libraries.update(['regex'])
        elif library == 'locale':
            required_libraries.update(['filesystem', 'system', 'thread'])
        elif library == 'log':
            required_libraries.update(
                ['date_time', 'filesystem', 'system', 'thread'])
        elif library == 'log_setup':
            required_libraries.update(
                ['log', 'date_time', 'filesystem', 'system', 'thread'])
        elif library in {
                'test', 'prg_exec_monitor', 'test_exec_monitor',
                'unit_test_framework'
        }:
            if library == 'test' and 'test' in required_libraries:
                required_libraries.remove('test')
                required_libraries.update(['unit_test_framework'])
            if patched_test:
                required_libraries.update(['timer', 'chrono', 'system'])
        elif library == 'timer':
            required_libraries.update(['chrono', 'system'])

    libraries = []

    for library in boost_dependency_order():
        if library in required_libraries:
            libraries.append(library)

    for library in required_libraries:
        if library not in boost_dependency_set():
            libraries.append(library)

    return libraries
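
add_dependent_libraries makes a single pass over the requested libraries, pulls in each one's direct dependencies, and then emits the result in a fixed dependency order. A simplified sketch of that shape, using a small illustrative table rather than Boost's real dependency graph:

DEPENDS = {
    'chrono':     { 'system' },
    'filesystem': { 'system' },
    'timer':      { 'chrono', 'system' },
}
ORDER = [ 'system', 'chrono', 'timer', 'filesystem' ]

def add_dependents( libraries ):
    required = set( libraries )
    for library in libraries:
        required.update( DEPENDS.get( library, set() ) )
    return [ l for l in ORDER if l in required ]

print( add_dependents( ['timer'] ) )   # ['system', 'chrono', 'timer']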
Example #16
    def __call__(self, target, source, env):
        base_name = self._base_name(source, env)
        target = []
        if not callable(self._command):
            target.append(stdout_file_name_from(base_name))
            target.append(stderr_file_name_from(base_name))
        target.append(success_file_name_from(base_name))
        if self._targets:
            for t in self._targets:
                target.append(t)

        logger.trace("targets = {}".format(str([str(t) for t in target])))
        return target, source
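
The __call__ above follows the standard SCons emitter protocol: an emitter receives (target, source, env) and returns the pair, usually with extra targets appended so SCons knows every file the action will produce. A generic sketch of that contract (the extra ".log" by-product is illustrative):

def emitter( target, source, env ):
    target = list( target )
    target.append( str( target[0] ) + '.log' )   # declare an extra by-product
    return target, source

print( emitter( ['app'], ['app.c'], {} ) )   # (['app', 'app.log'], ['app.c'])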
Example #17
 def remote_branch_exists(cls, repository, branch):
     command = "{git} ls-remote --heads {repository} {branch}".format(
         git=cls.binary(), repository=repository, branch=branch)
     result = cls.execute_command(command)
     if result:
         for line in result.splitlines():
             if line.startswith("warning: redirecting"):
                 logger.trace("Ignoring redirection warning and proceeding")
             elif branch in line:
                 logger.trace("Branch {branch} found in {line}".format(
                     branch=as_info(branch), line=as_notice(line)))
                 return True
     return False
Example #18
def relative_start(env, start, default):

    start, base_path = clean_start(env, start, default)

    rel_start = os.path.relpath(base_path, start)

    logger.trace(
        "paths: start = [{}], base_path = [{}], rel_start = [{}]".format(
            as_notice(start), as_notice(base_path), as_notice(rel_start)))

    if not os.path.isabs(start):
        start = rel_start

    return start, rel_start, base_path
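
A small sketch of the os.path.relpath relationship that the trace above logs (paths are illustrative):

import os

base_path = '/project/src'
start = '/project'
print( os.path.relpath( base_path, start ) )   # 'src': base_path as seen from start
print( os.path.isabs( start ) )                # True, so start would be kept as-is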
Example #19
def relative_start( env, start, default ):

    start, base_path = clean_start( env, start, default )

    rel_start = os.path.relpath( base_path, start )

    logger.trace(
            "paths: start = [{}], base_path = [{}], rel_start = [{}]"
            .format( as_notice( start ), as_notice( base_path ), as_notice( rel_start ) )
        )

    if not os.path.isabs( start ):
        start = rel_start

    return start, rel_start, base_path
Example #20
    def remote_default_branch(cls, repository):
        command = "{git} ls-remote --symref {repository} HEAD".format(
            git=cls.binary(), repository=repository)
        result = cls.execute_command(command)

        if result:
            branch_pattern = r'ref[:]\s+refs/heads/(?P<default_branch>[^\s]+)\s+HEAD'
            match = re.search(branch_pattern, result)
            logger.trace(
                "When searching for default branch name for repoistory [{}] using regex [{}] the following match [{}] was returned"
                .format(as_info(repository), as_notice(branch_pattern),
                        as_info(str(match))))
            if match:
                return match.group('default_branch')
        return None
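
A sketch of the default-branch regex above run against representative "git ls-remote --symref <repository> HEAD" output (the sample output is assumed, not captured from a real remote):

import re

result = "ref: refs/heads/main\tHEAD\nabc123\tHEAD"
branch_pattern = r'ref[:]\s+refs/heads/(?P<default_branch>[^\s]+)\s+HEAD'
match = re.search( branch_pattern, result )
print( match.group( 'default_branch' ) )   # 'main'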
Example #21
 def read( self, block ):
     try:
         logger.trace( "Stream _queue.get [{}]".format( self._name ) )
         text = self._queue.get( block )
         if text:
             for line in text.splitlines():
                 if self._processor:
                     line = self._processor( line )
                     if line:
                         print( line )
                 else:
                     print( line )
         self._queue.task_done()
     except Queue.Empty:
         logger.trace( "Stream Queue.Empty raised [{}]".format( self._name ) )
Example #22
 def read( self, block ):
     try:
         logger.trace( "Stream _queue.get [{}]".format( self._name ) )
         text = self._queue.get( block )
         if text:
             for line in text.splitlines():
                 if self._processor:
                     line = self._processor( line )
                     if line:
                         print( line )
                 else:
                     print( line )
         self._queue.task_done()
     except Queue.Empty:
         logger.trace( "Stream Queue.Empty raised [{}]".format( self._name ) )
Example #23
def add_dependent_libraries( boost, linktype, libraries ):
    version = boost.numeric_version()
    patched_test = boost._patched_test
    required_libraries = set( libraries )

    logger.trace( "Required Library Set = [{}]".format( colour_items( [l for l in required_libraries] ) ) )

    for library in libraries:
        if library in boost_libraries_with_no_dependencies():
            continue
        elif library == 'chrono':
            required_libraries.update( ['system'] )
        elif library == 'coroutine':
            required_libraries.update( ['context', 'system'] )
            if version > 1.55:
                required_libraries.update( ['thread'] )
            if linktype == 'shared':
                required_libraries.update( ['chrono'] )
        elif library == 'filesystem':
            required_libraries.update( ['system'] )
        elif library == 'graph':
            required_libraries.update( ['regex'] )
        elif library == 'locale':
            required_libraries.update( ['filesystem', 'system', 'thread'] )
        elif library == 'log':
            required_libraries.update( ['date_time', 'filesystem', 'system', 'thread'] )
        elif library == 'log_setup':
            required_libraries.update( ['log', 'date_time', 'filesystem', 'system', 'thread'] )
        elif library in { 'test', 'prg_exec_monitor', 'test_exec_monitor', 'unit_test_framework' }:
            if library == 'test' and 'test' in required_libraries:
                required_libraries.remove( 'test' )
                required_libraries.update( ['unit_test_framework'] )
            if patched_test:
                required_libraries.update( ['timer', 'chrono', 'system'] )
        elif library == 'timer':
            required_libraries.update( ['chrono', 'system'] )

    libraries = []

    for library in boost_dependency_order():
        if library in required_libraries:
            libraries.append( library )

    for library in required_libraries:
        if library not in boost_dependency_set():
            libraries.append( library )

    return libraries
Example #24
def lazy_update_library_list( env, emitting, libraries, built_libraries, add_dependents, linktype, boost, stage_dir ):

    if add_dependents:
        if not emitting:
            libraries = set( build_with_library_name(l) for l in add_dependent_libraries( boost, linktype, libraries ) )
        else:
            libraries = add_dependent_libraries( boost, linktype, libraries )

    if not stage_dir in built_libraries:
        logger.trace( "Lazy update libraries list for [{}] to [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        built_libraries[ stage_dir ] = set( libraries )
    else:
        logger.trace( "Lazy read libraries list for [{}]: libraries are [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        libraries = [ l for l in libraries if l not in built_libraries[ stage_dir ] ]

    return libraries
Example #25
    def __call__(self, env, target, source, match=None, exclude=None):
        destination = target
        if destination[0] != '#' and not os.path.isabs(destination):
            destination = os.path.join(env['abs_final_dir'], destination)

        filtered_nodes = filter_nodes(source, match, exclude)

        if filtered_nodes:

            logger.trace("filtered_nodes = [{}]".format(
                colour_items([str(n) for n in filtered_nodes])))

            installed_files = env.Install(destination, filtered_nodes)
            cuppa.progress.NotifyProgress.add(env, installed_files)
            return installed_files
        return []
Example #26
def glob( start, file_pattern, exclude_dirs_pattern=None, discard_pattern=None ):

    if is_string( file_pattern ):
        file_pattern = re.compile( fnmatch.translate( file_pattern ) )

    if exclude_dirs_pattern:
        if is_string( exclude_dirs_pattern ):
            exclude_dirs_pattern = re.compile( fnmatch.translate( exclude_dirs_pattern ) )

    if discard_pattern:
        if is_string( discard_pattern ):
            discard_pattern = re.compile( fnmatch.translate( discard_pattern ) )

    matches = []
    subdir = False

    logger.trace( "file_pattern = [{}], start = [{}]".format( as_notice( file_pattern.pattern ), as_notice( start ) ) )

    for root, dirnames, filenames in os.walk( start ):

        if exclude_dirs_pattern:
            # remove any directories from the search that match the exclude regex
            dirnames[:] = [ d for d in dirnames if not exclude_dirs_pattern.match(d) ]

        exclude_this_dir = False
        matches_in_this_dir = []

        for filename in filenames:
            if subdir and discard_pattern and discard_pattern.match( filename ):
                # if we are in a subdir and it contains a file that matches the discard_pattern
                # set exclude_this_dir to True so later we can discard any local matches we'd
                # already encountered while walking the directory
                exclude_this_dir = True
                break
            if file_pattern.match( filename ):
                matches_in_this_dir.append( os.path.join( root, filename ) )

        if not exclude_this_dir:
            matches += matches_in_this_dir
        else:
            # We are excluding this directory and therefore all of its subdirs
            dirnames[:] = []

        # After the first pass through the loop we will be in a subdirectory
        subdir = True

    return matches
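
The patterns handed to glob() are shell-style globs converted to regexes with fnmatch.translate, which is what the match() calls above test against. A short sketch:

import fnmatch
import re

file_pattern = re.compile( fnmatch.translate( "*.cpp" ) )
print( bool( file_pattern.match( "main.cpp" ) ) )    # True
print( bool( file_pattern.match( "main.hpp" ) ) )    # False

exclude_dirs = re.compile( fnmatch.translate( "build*" ) )
print( bool( exclude_dirs.match( "build-debug" ) ) ) # True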
Example #27
    def summary_name(cls, filename, toolchain_variant_dir, offset_dir,
                     sconscript_name):
        name = os.path.splitext(filename)[0]
        if name.startswith(coverage_index_marker):
            name = name.replace(coverage_index_marker, "")

        logger.trace(
            "filename = [{}], toolchain_variant_dir = [{}], offset_dir = [{}], sconscript_name = [{}]"
            .format(
                as_info(filename),
                as_notice(toolchain_variant_dir),
                as_info(offset_dir),
                as_info(sconscript_name),
            ))

        return "./{}/{}".format(offset_dir, sconscript_name and sconscript_name
                                or "*")
Example #28
 def _base_name(self, source, env):
     if callable(self._command):
         name = self._command.__name__ + "_" + str(
             self.next_invocation_id(env))
         return os.path.join(self._final_dir, name)
     elif self._command:
         path = os.path.join(self._final_dir, self._name_from_command(env))
         path, name = os.path.split(path)
         name = unique_short_filename(name)
         logger.trace("Command = [{}], Unique Name = [{}]".format(
             as_notice(self._command), as_notice(name)))
         return os.path.join(path, name)
     else:
         program_file = str(source[0])
         if not program_file.startswith(self._final_dir):
             program_file = os.path.split(program_file)[1]
         return program_file
Example #29
def filter_nodes(nodes, match_patterns, exclude_patterns=[]):

    nodes = Flatten(nodes)

    if not match_patterns and not exclude_patterns:
        return nodes

    if match_patterns:
        match_patterns = Flatten([match_patterns])
        for i, match_pattern in enumerate(match_patterns):
            if is_string(match_pattern):
                match_patterns[i] = re.compile(
                    fnmatch.translate(match_pattern))

    if exclude_patterns:
        exclude_patterns = Flatten([exclude_patterns])
        for i, exclude_pattern in enumerate(exclude_patterns):
            if is_string(exclude_pattern):
                exclude_patterns[i] = re.compile(
                    fnmatch.translate(exclude_pattern))

    filtered_nodes = []

    for node in nodes:
        path = str(node)
        logger.trace("Node in nodes to filter = [{}][{}]".format(
            as_notice(path), as_notice(node.path)))

        if exclude_patterns:
            excluded = False
            for exclude_pattern in exclude_patterns:
                if exclude_pattern.match(path):
                    excluded = True
                    break
            if excluded:
                continue

        if not match_patterns:
            filtered_nodes.append(node)
        else:
            for match_pattern in match_patterns:
                if match_pattern.match(path):
                    filtered_nodes.append(node)

    return filtered_nodes
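
A dependency-free sketch of the filtering rules above: exclusions are checked first, then a path is kept if it matches any match pattern (or unconditionally when no match patterns were given). File names here are illustrative:

import fnmatch
import re

paths = [ 'a.cpp', 'a_test.cpp', 'b.hpp' ]
match = re.compile( fnmatch.translate( '*.cpp' ) )
exclude = re.compile( fnmatch.translate( '*_test.cpp' ) )

kept = [ p for p in paths if not exclude.match( p ) and match.match( p ) ]
print( kept )   # ['a.cpp']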
Example #30
    def __call__( self, target, source, env ):

        for library in self._libraries:
            filename = None
            if self._linktype == 'static':
                filename = static_library_name( env, library, self._toolchain, self._boost.version(), self._variant, self._threading )
            else:
                filename = shared_library_name( env, library, self._toolchain, self._boost.full_version(), self._variant, self._threading )

            built_library_path = os.path.join( self._location, self._stage_dir, 'lib', filename )

            logger.trace( "Emit Boost library [{}] to [{}]".format( as_notice(library), as_notice(built_library_path) ) )

            node = File( built_library_path )

            target.append( node )

        return target, source
Example #31
    def __call__( self, env, pattern, start=default, exclude_dirs=default ):

        base_path = os.path.realpath( env['sconscript_dir'] )

        if start == self.default:
            start = base_path

        start = os.path.expanduser( start )

        rel_start = os.path.relpath( base_path, start )

        logger.trace(
            "paths: start = [{}], base_path = [{}], rel_start = [{}]"
            .format( as_notice( start ), as_notice( base_path ), as_notice( rel_start ) )
        )

        if not os.path.isabs( start ):
            start = rel_start

        if exclude_dirs == self.default:
            exclude_dirs = [ env['download_root'], env['build_root' ] ]

        exclude_dirs_regex = None

        if exclude_dirs:
            def up_dir( path ):
                element = next( e for e in path.split(os.path.sep) if e )
                return element == ".."
            exclude_dirs = [ re.escape(d) for d in exclude_dirs if not os.path.isabs(d) and not up_dir(d) ]
            exclude_dirs = "|".join( exclude_dirs )
            exclude_dirs_regex = re.compile( exclude_dirs )

        matches = cuppa.recursive_glob.glob( start, pattern, exclude_dirs_pattern=exclude_dirs_regex )

        logger.trace(
            "matches = [{}]."
            .format( colour_items( [ str(match) for match in matches ] ) )
        )

        make_relative = True
        if rel_start.startswith( os.pardir ):
            make_relative = False

        logger.trace( "make_relative = [{}].".format( as_notice( str(make_relative) ) ) )

        nodes = [ env.File( make_relative and os.path.relpath( match, base_path ) or match ) for match in matches ]

        logger.trace(
            "nodes = [{}]."
            .format( colour_items( [ str(node) for node in nodes ] ) )
        )

        return nodes
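
A sketch of how the exclude_dirs regex above is assembled: absolute paths and parent-relative paths are filtered out, the remaining directory names are escaped, and everything is OR-ed into a single pattern:

import os
import re

def up_dir( path ):
    element = next( e for e in path.split( os.path.sep ) if e )
    return element == ".."

exclude_dirs = [ 'build', 'downloads', '../outside', '/abs/path' ]
exclude_dirs = [ re.escape(d) for d in exclude_dirs if not os.path.isabs(d) and not up_dir(d) ]
exclude_dirs_regex = re.compile( "|".join( exclude_dirs ) )

print( bool( exclude_dirs_regex.match( 'build' ) ) )   # True
print( bool( exclude_dirs_regex.match( 'src' ) ) )     # False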
Example #32
    def windows_spawn( self, sh, escape, cmd, args, env ):

        processor = SpawnedProcessor( self.scons_env )

        stdout = Stream( processor, "stdout" )
        stderr = Stream( processor, "stderr" )

        pspawn = PSpawn( _pspawn, sh, escape, cmd, args, env, stdout, stderr )

        pspawn_thread = threading.Thread( target=pspawn )

        finished = threading.Event()
        pspawn_thread.start()

        stdout_thread = threading.Thread( target = Reader( stdout, finished ) )
        stdout_thread.start()

        stderr_thread = threading.Thread( target = Reader( stderr, finished ) )
        stderr_thread.start()

        pspawn_thread.join()
        logger.trace( "Processor - PSPAWN joined" )
        finished.set()

        stdout.join()
        logger.trace( "Processor - STDOUT stream joined" )
        stdout_thread.join()
        logger.trace( "Processor - STDOUT thread joined" )

        stderr.join()
        logger.trace( "Processor - STDERR stream joined" )
        stderr_thread.join()
        logger.trace( "Processor - STDERR thread joined" )

        returncode = pspawn.returncode()

        summary = processor.summary( returncode )

        if summary:
            print( summary )

        return returncode
Example #33
    def windows_spawn( self, sh, escape, cmd, args, env ):

        processor = SpawnedProcessor( self.scons_env )

        stdout = Stream( processor, "stdout" )
        stderr = Stream( processor, "stderr" )

        pspawn = PSpawn( _pspawn, sh, escape, cmd, args, env, stdout, stderr )

        pspawn_thread = threading.Thread( target=pspawn )

        finished = threading.Event()
        pspawn_thread.start()

        stdout_thread = threading.Thread( target = Reader( stdout, finished ) )
        stdout_thread.start()

        stderr_thread = threading.Thread( target = Reader( stderr, finished ) )
        stderr_thread.start()

        pspawn_thread.join()
        logger.trace( "Processor - PSPAWN joined" )
        finished.set()

        stdout.join()
        logger.trace( "Processor - STDOUT stream joined" )
        stdout_thread.join()
        logger.trace( "Processor - STDOUT thread joined" )

        stderr.join()
        logger.trace( "Processor - STDERR stream joined" )
        stderr_thread.join()
        logger.trace( "Processor - STDERR thread joined" )

        returncode = pspawn.returncode()

        summary = processor.summary( returncode )

        if summary:
            print( summary )

        return returncode
Example #34
def filter_nodes( nodes, match_patterns, exclude_patterns=[] ):

    nodes = Flatten( nodes )

    if not match_patterns and not exclude_patterns:
        return nodes

    if match_patterns:
        match_patterns = Flatten( [ match_patterns ] )
        for i, match_pattern in enumerate(match_patterns):
            if is_string( match_pattern ):
                match_patterns[i] = re.compile( fnmatch.translate( match_pattern ) )

    if exclude_patterns:
        exclude_patterns = Flatten( [ exclude_patterns ] )
        for i, exclude_pattern in enumerate(exclude_patterns):
            if is_string( exclude_pattern ):
                exclude_patterns[i] = re.compile( fnmatch.translate( exclude_pattern ) )

    filtered_nodes = []

    for node in nodes:
        path = str( node )
        logger.trace( "Node in nodes to filter = [{}][{}]".format( as_notice(path), as_notice(node.path) ) )

        if exclude_patterns:
            excluded = False
            for exclude_pattern in exclude_patterns:
                if exclude_pattern.match( path ):
                    excluded = True
                    break
            if excluded:
                continue

        if not match_patterns:
            filtered_nodes.append( node )
        else:
            for match_pattern in match_patterns:
                if match_pattern.match( path ):
                    filtered_nodes.append( node )

    return filtered_nodes
Example #35
    def add_toolchains( self, env ):
        toolchains = self.toolchains_key
        cuppa.modules.registration.add_to_env( toolchains, env, env.add_available_toolchain, env.add_supported_toolchain )

        logger.trace( "supported toolchains are [{}]".format(
                colour_items( env["supported_toolchains"] )
        ) )
        logger.info( "available toolchains are [{}]".format(
                colour_items( sorted( env[toolchains].keys(), reverse=True ), as_info )
        ) )

        SCons.Script.AddOption(
            '--toolchains',
            type     = 'string',
            nargs    = 1,
            action   = 'callback',
            callback = ParseToolchainsOption( env['supported_toolchains'], env[toolchains].keys() ),
            help     = 'The Toolchains you wish to build against. A comma separated list with wildcards'
                       ' may be provided. For example --toolchains=gcc*,clang37,clang36'
        )
Example #36
    def __call__( self, env, libraries ):

        if not self._add_dependents:
            logger.warn( "BoostStaticLibrary() is deprecated, use BoostStaticLibs() or BoostStaticLib() instead" )
        libraries = Flatten( [ libraries ] )

        if not 'boost' in env['BUILD_WITH']:
            env.BuildWith( 'boost' )
        Boost = env['dependencies']['boost']( env )

        logger.trace( "Build static libraries [{}]".format( colour_items( libraries ) ) )

        library = BoostLibraryBuilder(
                Boost,
                add_dependents = self._add_dependents,
                verbose_build  = self._verbose_build,
                verbose_config = self._verbose_config )( env, None, None, libraries, 'static' )
        if self._build_always:
            return AlwaysBuild( library )
        else:
            return library
Example #37
    def get_branch(cls, path):
        branch = None
        try:
            result = cls.execute_command(
                "{git} symbolic-ref HEAD".format(git=cls.binary()), path)
            branch = result.replace("refs/heads/", "").strip()
            logger.trace("Branch (using symbolic-ref) for [{}] is [{}]".format(
                as_notice(path), as_info(branch)))
            return branch
        except cls.Error:
            pass

        # In case we have a detached head we can fallback to this
        result = cls.execute_command(
            "{git} show -s --pretty=\%d HEAD".format(git=cls.binary()), path)
        match = re.search(r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result)
        if match:
            branches = [b.strip() for b in match.group("branches").split(',')]
            logger.trace("Branches (using show) for [{}] are [{}]".format(
                as_notice(path), colour_items(branches)))
            if len(branches) == 1:
                # If this returns a tag: tag_name replace the ": " with "/" and then extract the tag_name
                # otherwise this will simply extract the branch_name as expected
                branch = branches[0].replace(': ', '/').split('/')[1]
            else:
                branch = branches[-2].split('/')[1]
            logger.trace("Branch (using show) for [{}] is [{}]".format(
                as_notice(path), as_info(branch)))
        else:
            logger.warn("No branch found from [{}]".format(result))

        return branch
Example #38
    def __init__(self, env, stage_dir, libraries, add_dependents, linktype,
                 boost, verbose_build, verbose_config):

        self._env = env

        sconstruct_id = env['sconstruct_path']
        global _prebuilt_boost_libraries
        if sconstruct_id not in _prebuilt_boost_libraries['action']:
            _prebuilt_boost_libraries['action'][sconstruct_id] = {}

        logger.trace(
            "Current Boost build [{}] has the following build variants [{}]".
            format(
                as_info(sconstruct_id),
                colour_items(_prebuilt_boost_libraries['action']
                             [sconstruct_id].keys())))

        logger.debug("Requested libraries [{}]".format(
            colour_items(libraries)))

        self._linktype = linktype
        self._variant = variant_name(self._env['variant'].name())
        self._target_arch = env['target_arch']
        self._toolchain = env['toolchain']
        self._stage_dir = stage_dir

        self._libraries = _lazy_update_library_list(
            env, False, libraries,
            _prebuilt_boost_libraries['action'][sconstruct_id], add_dependents,
            linktype, boost, self._stage_dir)

        logger.debug("Required libraries [{}]".format(
            colour_items(self._libraries)))

        self._location = boost.local()
        self._verbose_build = verbose_build
        self._verbose_config = verbose_config
        self._job_count = env['job_count']
        self._parallel = env['parallel']
        self._threading = True
Example #39
    def __call__(self, target, source, env):

        logger.trace("target = {}".format(
            colour_items([str(t) for t in target])))
        logger.trace("source = {}".format(
            colour_items([str(s) for s in source])))

        cuppa.path.lazy_create_path(
            os.path.join(env['base_path'], env['build_dir']))

        self._target = target

        # Each source will result in one or more targets so we need to slice the targets to pick up
        # the gcov target (the first one) before we perform the zip iteration
        for s, t in zip(
                source,
                itertools.islice(target, 0, None,
                                 len(target) // len(source))):

            gcov_path = os.path.splitext(os.path.splitext(t.path)[0])[0]
            gcov_log = t.path
            logger.trace("gcov_path = [{}]".format(as_notice(str(gcov_path))))
            self._run_gcov(env, s.path, gcov_path, gcov_log)

        target = self._target

        return None
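
The double splitext above strips two extensions from the gcov log target to recover the stem the gcov output is written to. A tiny sketch (the file name is illustrative):

import os

gcov_log = 'src/app.cpp.gcov.log'
gcov_path = os.path.splitext( os.path.splitext( gcov_log )[0] )[0]
print( gcov_path )   # 'src/app.cpp'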
Example #40
    def __call__(self, target, source, env):
        destination = self._destination
        if not destination:
            destination = env['abs_final_dir']
        else:
            destination = self._destination + destination_subdir(env)

        master_index = env.File(
            os.path.join(self._destination, "test-report-index.json"))
        master_report = env.File(
            os.path.join(self._destination, "test-report-index.json"))

        env.Clean(source, master_index)
        env.Clean(source, master_report)

        ReportIndexBuilder.register_report_folders(
            final_dir=env['abs_final_dir'], destination_dir=self._destination)

        for html_report, json_report in zip(*[iter(source)] * 2):
            target.append(
                os.path.join(destination,
                             os.path.split(str(html_report))[1]))
            json_report_target = env.File(
                os.path.join(destination,
                             os.path.split(str(json_report))[1]))
            target.append(json_report_target)
            ReportIndexBuilder.update_index(
                json_report_target,
                os.path.split(json_report_target.abspath)[0])

        logger.trace("sources = [{}]".format(
            colour_items([str(s) for s in source])))
        logger.trace("targets = [{}]".format(
            colour_items([str(t) for t in target])))

        env.Depends(master_report, target)
        env.Depends(master_index, target)

        return target, source
Example #41
    def retrieve_repo_info( cls, vcs_system, vcs_directory, expected_vc_type ):
        if not expected_vc_type or expected_vc_type == vcs_system.vc_type():
            try:
                logger.trace( "expected_vc_type=[{expected_vc_type}], vcs_system=[{vc_type}], vcs_directory=[{directory}]".format(
                        expected_vc_type=as_info( str(expected_vc_type) ),
                        vc_type=as_info( vcs_system and vcs_system.vc_type() or "None" ),
                        directory=as_notice( str(vcs_directory) )
                ) )

                info = vcs_system.info( vcs_directory )

                logger.trace( "vcs_info=[{vcs_info}]".format( vcs_info=as_info(str(info)) ) )

                return info
            except vcs_system.Error as ex:
                if expected_vc_type:
                    logger.error( "Failed to retreive info for [{}] because [{}]".format(
                            as_error( vcs_directory ),
                            as_error( str(ex) )
                    ) )
                    raise
                return None
Example #42
    def GenerateHtmlTestReport(self, target, source, env):

        self._base_uri = ""
        if self._auto_link_tests:
            self._base_uri = initialise_test_linking(
                env, link_style=self._link_style)

        # Each source will result in one or more targets so we need to slice the targets to pick up
        # the gcov target (the first one) before we perform the zip iteration
        for s, t in zip(
                source,
                itertools.islice(target, 0, None,
                                 len(target) // len(source))):
            test_suites = {}

            logger.trace("source = [{}]".format(as_info(str(s))))
            logger.trace("target = [{}]".format(as_info(str(t))))

            test_cases = self._read(s.abspath)
            for test_case in test_cases:

                if not 'assertions_count' in test_case:
                    if 'assertions' in test_case:
                        test_case['assertions_count'] = test_case['assertions']
                        test_case['assertions_passed'] = test_case['passed']
                        test_case['assertions_failed'] = test_case['failed']
                        test_case['assertions_aborted'] = test_case['aborted']
                    else:
                        test_case['assertions_count'] = 0
                        test_case['assertions_passed'] = 0
                        test_case['assertions_failed'] = 0
                        test_case['assertions_aborted'] = 0

                self._add_to_test_suites(test_suites, test_case)
            self._write(str(t),
                        env,
                        test_suites,
                        sort_test_cases=self._sort_test_cases)
        return None
Example #43
    def __init__( self, env, stage_dir, libraries, add_dependents, linktype, boost, verbose_build, verbose_config ):

        self._env = env

        logger.trace( "Requested libraries [{}]".format( colour_items( libraries ) ) )

        self._linktype       = linktype
        self._variant        = variant_name( self._env['variant'].name() )
        self._target_arch    = env['target_arch']
        self._toolchain      = env['toolchain']
        self._stage_dir      = stage_dir

        self._libraries = lazy_update_library_list( env, False, libraries, self._built_libraries, add_dependents, linktype, boost, self._stage_dir )

        logger.trace( "Required libraries [{}]".format( colour_items( self._libraries ) ) )

        self._location       = boost.local()
        self._version        = boost.numeric_version()
        self._full_version   = boost.full_version()
        self._verbose_build  = verbose_build
        self._verbose_config = verbose_config
        self._job_count      = env['job_count']
        self._parallel       = env['parallel']
Example #44
    def __call__(self, target, source, env):
        destination = self._destination
        if not destination:
            destination = env['abs_final_dir']
            env.Clean(source,
                      os.path.join(self._destination, "coverage-index.html"))
        else:
            env.Clean(source,
                      os.path.join(self._destination, "coverage-index.html"))
            destination = self._destination + destination_subdir(env)

        files_node = next(
            (s for s in source if os.path.splitext(str(s))[1] == ".cov_files"),
            None)
        if files_node:
            variant_index_file = os.path.join(env['abs_final_dir'],
                                              coverage_index_name_from(env))
            target.append(variant_index_file)
            env.Clean(
                source,
                os.path.join(destination,
                             os.path.split(variant_index_file)[1]))

            variant_summary_file = os.path.splitext(
                variant_index_file)[0] + ".log"
            target.append(variant_summary_file)

            CoverageIndexBuilder.register_coverage_folders(
                final_dir=env['abs_final_dir'],
                destination_dir=self._destination)

        logger.trace("sources = [{}]".format(
            colour_items([str(s) for s in source])))
        logger.trace("targets = [{}]".format(
            colour_items([str(t) for t in target])))

        return target, source
Example #45
    def get_branch( cls, path ):
        branch = None
        remote = None

        # In case we have a detached head we use this
        result = cls.execute_command( "{git} show -s --pretty=\%d HEAD".format( git=cls.binary() ), path )
        match = re.search( r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result )
        if match:
            branches = [ b.strip() for b in match.group("branches").split(',') ]
            logger.trace( "Branches (using show) for [{}] are [{}]".format( as_notice(path), colour_items(branches) ) )
            if len(branches) == 1:
                # If this returns a tag: tag_name replace the ": " with "/" and then extract the tag_name
                # otherwise this will simply extract the branch_name as expected
                if not branches[0].startswith('tag:'):
                    remote = branches[0]
                branch = branches[0].replace(': ','/').split('/')[1]
            else:
                remote = branches[-2]
                branch = remote.split('/')[1]
            logger.trace( "Branch (using show) for [{}] is [{}]".format( as_notice(path), as_info(branch) ) )
        else:
            logger.warn( "No branch found from [{}]".format( result ) )

        return branch, remote
Example #46
    def __init__( self, cuppa_env, location, branch=None, extra_sub_path=None, name_hint=None ):

        self._location   = os.path.expanduser( location )
        self._full_url   = urlparse.urlparse( self._location )
        self._sub_dir    = None
        self._name_hint  = name_hint

        if extra_sub_path:
            if os.path.isabs( extra_sub_path ):
                raise LocationException( "Error extra sub path [{}] is not relative".format(extra_sub_path) )
            else:
                self._sub_dir = os.path.normpath( extra_sub_path )

        ## Get the location for the source dependency. If the location is a URL or an Archive we'll need to
        ## retrieve the URL and extract the archive. get_local_directory() returns the location of the source
        ## once this is done
        local_directory = self.get_local_directory( cuppa_env, self._location, self._sub_dir, branch, self._full_url )

        logger.trace( "Local Directory returned as [{}]".format(
                as_notice( local_directory )
        ) )

        self._base_local_directory = local_directory
        self._local_directory = self._sub_dir and os.path.join( local_directory, self._sub_dir ) or local_directory

        ## Now that we have a locally accessible version of the dependency we can try to collate some information
        ## about it to allow us to specify what we are building with.
        self._url, self._repository, self._branch, self._revision = self.get_info( self._location, self._local_directory, self._full_url )
        self._version, self._revision = self.ver_rev_summary( self._branch, self._revision, self._full_url.path )

        logger.debug( "Using [{}]{} at [{}] stored in [{}]".format(
                as_info( location ),
                ( branch and  ":[{}]".format( as_info(  str(branch) ) ) or "" ),
                as_info( self._version ),
                as_notice( self._local_directory )
        ) )
Example #47
    def __call__( self, env, source, **kwargs ):
        sources = Flatten( [ source ] )
        objects = []
        if 'CPPPATH' in env:
            env.AppendUnique( INCPATH = env['CPPPATH'] )

        if self._shared:
            obj_prefix = env.subst('$SHOBJPREFIX')
            obj_suffix = env.subst('$SHOBJSUFFIX')
            obj_builder = env.SharedObject
        else:
            obj_prefix = env.subst('$OBJPREFIX')
            obj_suffix = env.subst('$OBJSUFFIX')
            obj_builder = env.Object

        logger.trace( "Build Root = [{}]".format( as_notice( env['build_root'] ) ) )

        for source in sources:
            if not isinstance( source, Node ):
                source = env.File( source )

            logger.trace( "Object source = [{}]/[{}]".format( as_notice(str(source)), as_notice(source.path) ) )

            if os.path.splitext(str(source))[1] == obj_suffix:
                objects.append( source )
            else:
                target = None
                target = os.path.splitext( os.path.split( str(source) )[1] )[0]
                if not source.path.startswith( env['build_root'] ):
                    if os.path.isabs( str(source) ):
                        target = env.File( os.path.join( obj_prefix + target + obj_suffix ) )
                    else:
                        target = env.File( os.path.join( env['build_dir'], obj_prefix + target + obj_suffix ) )
                else:
                    offset_dir = os.path.relpath( os.path.split( source.path )[0], env['build_dir'] )
                    target = env.File( os.path.join( offset_dir, obj_prefix + target + obj_suffix ) )

                logger.trace( "Object target = [{}]/[{}]".format( as_notice(str(target)), as_notice(target.path) ) )

                objects.append(
                    obj_builder(
                        target = target,
                        source = source,
                        CPPPATH = env['SYSINCPATH'] + env['INCPATH'],
                        **kwargs ) )

        cuppa.progress.NotifyProgress.add( env, objects )

        return objects
Example #48
    def __call__( self, env, pattern, start=default, exclude_dirs=default ):

        start, rel_start, base_path = relative_start( env, start, self.default )

        if exclude_dirs == self.default:
            exclude_dirs = [ env['download_root'], env['build_root' ] ]

        exclude_dirs_regex = None

        if exclude_dirs:
            def up_dir( path ):
                element = next( e for e in path.split(os.path.sep) if e )
                return element == ".."
            exclude_dirs = [ re.escape(d) for d in exclude_dirs if not os.path.isabs(d) and not up_dir(d) ]
            exclude_dirs = "|".join( exclude_dirs )
            exclude_dirs_regex = re.compile( exclude_dirs )

        matches = cuppa.recursive_glob.glob( start, pattern, exclude_dirs_pattern=exclude_dirs_regex )

        logger.trace(
            "matches = [{}]."
            .format( colour_items( [ str(match) for match in matches ] ) )
        )

        make_relative = True
        if rel_start.startswith( os.pardir ):
            make_relative = False

        logger.trace( "make_relative = [{}].".format( as_notice( str(make_relative) ) ) )

        nodes = [ env.File( make_relative and os.path.relpath( match, base_path ) or match ) for match in matches ]

        logger.trace(
            "nodes = [{}]."
            .format( colour_items( [ str(node) for node in nodes ] ) )
        )

        return nodes
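
The exclusion handling above drops absolute and parent-relative entries before OR-ing the rest into one regex; a standalone sketch of just that step (directory names are hypothetical):

import os
import re

def build_exclude_regex( exclude_dirs ):
    # Discard absolute paths and anything that climbs out of the tree, then
    # combine the survivors into a single alternation pattern.
    def up_dir( path ):
        element = next( e for e in path.split( os.path.sep ) if e )
        return element == ".."
    safe = [ re.escape( d ) for d in exclude_dirs if not os.path.isabs( d ) and not up_dir( d ) ]
    return safe and re.compile( "|".join( safe ) ) or None

pattern = build_exclude_regex( [ "_build", "_download", "/absolute/ignored", "../outside" ] )
print( pattern.pattern )  # _build|_download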
Example #49
    def __call__( self, env, target, source, libraries, linktype ):

        variant      = variant_name( env['variant'].name() )
        target_arch  = env['target_arch']
        toolchain    = env['toolchain']
        stage_dir    = stage_directory( toolchain, variant, target_arch, toolchain.abi_flag(env) )

        library_action  = BoostLibraryAction ( env, stage_dir, libraries, self._add_dependents, linktype, self._boost, self._verbose_build, self._verbose_config )
        library_emitter = BoostLibraryEmitter( env, stage_dir, libraries, self._add_dependents, linktype, self._boost )

        logger.trace( "env = [{}]".format( as_info( env['build_dir'] ) ) )

        env.AppendUnique( BUILDERS = {
            'BoostLibraryBuilder' : env.Builder( action=library_action, emitter=library_emitter )
        } )

        bjam_exe = 'bjam'
        if platform.system() == "Windows":
            bjam_exe += ".exe"
        bjam_target = os.path.join( self._boost.local(), bjam_exe )
        bjam = env.Command( bjam_target, [], BuildBjam( self._boost ) )
        env.NoClean( bjam )

        built_libraries = env.BoostLibraryBuilder( target, source )

        built_library_map = {}
        for library in built_libraries:
            # Extract the library name from the library filename.
            # Possibly use regex instead?
            name = os.path.split( str(library) )[1]
            name = name.split( "." )[0]
            name = name.split( "-" )[0]
            name = "_".join( name.split( "_" )[1:] )

            built_library_map[name] = library

        logger.trace( "Built Library Map = [{}]".format( colour_items( built_library_map.keys() ) ) )

        variant_key = stage_dir

        logger.trace( "Source Libraries Variant Key = [{}]".format( as_notice( variant_key ) ) )

        if variant_key not in self._library_sources:
            self._library_sources[ variant_key ] = {}

        logger.trace( "Variant sources = [{}]".format( colour_items( self._library_sources[ variant_key ].keys() ) ) )

        required_libraries = add_dependent_libraries( self._boost, linktype, libraries )

        logger.trace( "Required libraries = [{}]".format( colour_items( required_libraries ) ) )

        for library in required_libraries:
            if library in self._library_sources[ variant_key ]:

                logger.trace( "Library [{}] already present in variant [{}]".format( as_notice(library), as_info(variant_key) ) )

                if library not in built_library_map:
                    logger.trace( "Add Depends for [{}]".format( as_notice( self._library_sources[ variant_key ][library].path ) ) )
                    env.Depends( built_libraries, self._library_sources[ variant_key ][library] )
            else:
                self._library_sources[ variant_key ][library] = built_library_map[library]

        logger.trace( "Library sources for variant [{}] = [{}]".format(
                as_info(variant_key),
                colour_items( k+":"+as_info(v.path) for k,v in self._library_sources[ variant_key ].iteritems() )
        ) )

        if built_libraries:

            env.Requires( built_libraries, bjam )

            if cuppa.build_platform.name() == "Linux":

                toolset_target = os.path.join( self._boost.local(), env['toolchain'].name() + "._jam" )
                toolset_config_jam = env.Command( toolset_target, [], WriteToolsetConfigJam() )

                project_config_target = os.path.join( self._boost.local(), "project-config.jam" )
                if not os.path.exists( project_config_target ):
                    project_config_jam = env.Requires( project_config_target, env.AlwaysBuild( toolset_config_jam ) )
                    env.Requires( built_libraries, project_config_jam )

                env.Requires( built_libraries, toolset_config_jam )

        install_dir = env['abs_build_dir']

        if linktype == 'shared':
            install_dir = env['abs_final_dir']

        installed_libraries = []

        for library in required_libraries:

            logger.debug( "Install Boost library [{}:{}] to [{}]".format( as_notice(library), as_info(str(self._library_sources[ variant_key ][library])), as_notice(install_dir) ) )

            library_node = self._library_sources[ variant_key ][library]

            logger.trace( "Library Node = \n[{}]\n[{}]\n[{}]\n[{}]".format(
                    as_notice(library_node.path),
                    as_notice(str(library_node)),
                    as_notice(str(library_node.get_binfo().bact) ),
                    as_notice(str(library_node.get_state()) )
            ) )

            installed_library = env.CopyFiles( install_dir, self._library_sources[ variant_key ][library] )

            installed_libraries.append( installed_library )

        logger.debug( "Boost 'Installed' Libraries = [{}]".format( colour_items( l.path for l in Flatten( installed_libraries ) ) ) )

        return Flatten( installed_libraries )
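
The name extraction above notes "Possibly use regex instead?"; one way that regex might look, assuming conventionally named Boost library files (the file names below are illustrative):

import re

_boost_library_name = re.compile( r"^(?:lib)?boost_([^-.]+)" )

def library_name( filename ):
    # "libboost_system-mt.so.1.66.0" -> "system"
    match = _boost_library_name.match( filename )
    return match and match.group(1) or None

print( library_name( "libboost_system-mt.so.1.66.0" ) )            # system
print( library_name( "boost_filesystem-vc141-mt-x64-1_66.lib" ) )  # filesystem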
Example #50
 def update_index( cls, json_report, destination ):
     logger.trace( "add destination = [{}]".format( as_notice(destination) ) )
     if not destination in cls.all_reports:
         cls.all_reports[ destination ] = []
     cls.all_reports[ destination ].append( json_report )
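
The test-then-create pattern above is what `collections.defaultdict` automates; a minimal equivalent sketch (names below are illustrative, not cuppa's):

from collections import defaultdict

all_reports = defaultdict( list )

def update_index( json_report, destination ):
    # defaultdict(list) creates the empty list on first access, so no
    # membership test is needed before appending.
    all_reports[ destination ].append( json_report )

update_index( "alpha.json", "/tmp/reports" )
print( dict( all_reports ) )  # {'/tmp/reports': ['alpha.json']}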
Example #51
    def update( self, env, project, toolchain, variant, build_root, working_dir, final_dir_offset ):

        logger.debug( "Update project [{}] for [{}, {}]".format( as_notice( project ), as_notice( toolchain) , as_notice( variant ) ) )
        logger.trace( "Update project [{}] working_dir [{}], final_dir [{}]".format( as_notice( project ), as_notice( working_dir) , as_notice( final_dir_offset ) ) )

        if project not in self._projects:

            title = os.path.splitext( project )[0]
            directory, filename = os.path.split( title )
            cbs_file_name = filename
            if cbs_file_name in [ 'sconscript', 'SConscript', 'Sconscript' ]:
                cbs_file_name = os.path.split( directory )[1]
                if cbs_file_name == ".":
                    cbs_file_name = os.path.split( os.path.abspath( env['sconscript_dir'] ) )[1]
                    if not cbs_file_name:
                        cbs_file_name = "sconscript"

            if not self._place_cbs_by_sconscript:
                directory = env['working_dir']
            directory = os.path.join( directory, "cbs")
            project_file = os.path.join( directory, cbs_file_name + ".cbp" )

            execution_dir = ''
            if directory:
                execution_dir = os.path.relpath( os.getcwd(), directory )
                execution_dir = (   os.path.pardir
                                  + os.path.sep
                                  + os.path.join( execution_dir,
                                                  os.path.split( os.path.abspath( os.getcwd() ) )[1] ) )

            self._projects[project] = {}
            self._projects[project]['title']         = title
            self._projects[project]['directory']     = directory
            self._projects[project]['path']          = os.path.join( os.getcwd(), directory )
            self._projects[project]['execution_dir'] = execution_dir
            self._projects[project]['project_file']  = project_file
            self._projects[project]['variants']      = set()
            self._projects[project]['toolchains']    = set()
            self._projects[project]['files']         = set()
            self._projects[project]['targets']       = {}
            self._projects[project]['lines_header']  = []
            self._projects[project]['lines_footer']  = []

        if not self._projects[project]['lines_header']:
            self._projects[project]['lines_header'] = self.create_header( self._projects[project]['title'],
                                                                          self._projects[project]['execution_dir'] )

        if not self._projects[project]['lines_footer']:
            self._projects[project]['lines_footer'] = self.create_footer()

        self._projects[project]['variants'].add( variant )
        self._projects[project]['toolchains'].add( toolchain )

        working_dir_path = os.path.join( self._projects[project]['execution_dir'], working_dir )

        final_dir_path = os.path.normpath( os.path.join( working_dir_path, final_dir_offset ) )

        target = "{}-{}".format( toolchain, variant )

        test_actions = [ "", "--test" ]

        for action in test_actions:

            target_name = target + action

            if target_name not in self._projects[project]['targets']:
                self._projects[project]['targets'][target_name] = self.create_target(
                        target_name,
                        project,
                        toolchain,
                        variant,
                        action,
                        working_dir_path,
                        final_dir_path )
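
A worked example of the `execution_dir` computation above with concrete (hypothetical) paths: the result expresses the working directory relative to where the .cbp file will live, routed up one extra level and back down through the working directory's own name:

import os

cwd = "/home/user/work"
directory = "proj/cbs"  # where the .cbp project file is written

execution_dir = os.path.relpath( cwd, os.path.join( cwd, directory ) )   # "../.."
execution_dir = (   os.path.pardir
                  + os.path.sep
                  + os.path.join( execution_dir, os.path.split( cwd )[1] ) )

print( execution_dir )  # ../../../work (on POSIX)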
Example #52
    def on_progress( cls, progress, sconscript, variant, env, target, source ):
        if progress == 'sconstruct_end':

            logger.trace( "Destination dirs = [{}]".format( colour_items( cls.destination_dirs.keys() ) ) )
            logger.trace( "cls.all_reports dirs = [{}]".format( colour_items( cls.all_reports.keys() ) ) )

            for destination_dir, final_dirs in cls.destination_dirs.iteritems():

                master_index_path = os.path.join( destination_dir, "test-report-index.html" )
                master_report_path = os.path.join( destination_dir, "test-report-index.json" )

                logger.debug( "Master test report index path = [{}]".format( as_notice( master_index_path ) ) )

                template = cls.get_template()

                summaries = {}
                summaries['vcs_info'] = initialise_test_linking( env, link_style="raw" )
                summaries['name'] = str(env.Dir(destination_dir)) + "/*"
                summaries['title'] = summaries['vcs_info'][0]
                summaries['branch'] = summaries['vcs_info'][2]
                summaries['commit'] = summaries['vcs_info'][4]
                summaries['uri'] = summaries['vcs_info'][0]
                summaries['toolchain_variants'] = {}
                summaries['reports'] = {}

                for report_dir, json_reports in cls.all_reports.iteritems():
                    common, tail1, tail2 = cuppa.path.split_common( report_dir, destination_dir )
                    logger.trace( "common, tail1, tail2 = {}, {}, {}".format( as_info(common), as_notice(tail1), as_notice(tail2) ) )
                    if common and (not tail1 or not tail2):

                        for json_report in json_reports:

                            summary = CollateReportIndexAction._read( str(json_report) )

                            toolchain_variant = summary['toolchain_variant_dir']

                            cls._update_toolchain_variant_summary( summaries, toolchain_variant, summary )

                            summary_name = summary['name']

                            if not summary_name in summaries['reports']:
                                summaries['reports'][summary_name] = {}
                                summaries['reports'][summary_name]['variants'] = {}

                            summaries['reports'][summary_name]['variants'][toolchain_variant] = summary

                report_list = summaries['reports'].items()
                report_list.sort()

                for name, report in report_list:
                    report['default_variant'] = None
                    report['default_summary_rel_path'] = None
                    variant_count = 0
                    status_rank = 0
                    for variant in report['variants'].itervalues():
                        variant_count += 1
                        index = cls._ranked_status().index(variant['status'])
                        if index > status_rank:
                            status_rank = index
                        if not report['default_variant']:
                            report['default_variant'] = variant['toolchain_variant_dir']
                            report['default_summary_rel_path'] = variant['summary_rel_path']

                    report['variant_count'] = variant_count
                    report['status'] = cls._ranked_status()[status_rank]
                    report['selector'] = GenerateHtmlReportBuilder._selector_from_name( name )
                    report['style'] = GenerateHtmlReportBuilder._status_bootstrap_style( report['status'] )
                    report['text_colour'] = GenerateHtmlReportBuilder._status_bootstrap_text_colour( report['status'] )

                summaries_json_report = json.dumps(
                    summaries,
                    sort_keys = True,
                    indent = 4,
                    separators = (',', ': ')
                )

                logger.trace( "summaries = \n{}".format( summaries_json_report ) )

                with open( master_report_path, 'w' ) as master_report_file:
                    master_report_file.write( summaries_json_report )

                with open( master_index_path, 'w' ) as master_index_file:
                    master_index_file.write(
                        template.render(
                            summaries=summaries,
                            report_list=report_list,
                            next=next,
                            len=len
                        ).encode('utf-8')
                    )
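
The per-report loop above folds each variant's status into a single worst-case status via its position in a ranked list; condensed, the same fold is a `max` keyed on the rank (status names here are illustrative, not necessarily cuppa's):

RANKED_STATUS = [ "passed", "skipped", "expected_failure", "failed", "aborted" ]

def overall_status( variant_statuses ):
    # The highest-ranked (worst) status seen across the variants wins.
    return max( variant_statuses, key=RANKED_STATUS.index )

print( overall_status( [ "passed", "failed", "passed" ] ) )  # failed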
Example #53
 def write( self, text ):
     logger.trace( "Stream _queue.put [{}]".format( self._name ) )
     self._queue.put( text )
Example #54
 def join( self ):
     if self._queue.empty():
         logger.trace( "Stream _queue.empty() - flush with None [{}]".format( self._name ) )
         self._queue.put( None )
     self._queue.join()
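
A standalone sketch of the sentinel-flush pattern used by write() and join() above: a None marker wakes the consumer for a final flush, and Queue.join() then blocks until every queued item, sentinel included, has been marked done (the consumer body here is illustrative):

import threading

try:
    import queue           # Python 3
except ImportError:
    import Queue as queue  # Python 2

_queue = queue.Queue()

def consume():
    while True:
        text = _queue.get()
        if text is None:
            pass  # a real stream would flush its output here
        else:
            pass  # a real stream would write the text here
        _queue.task_done()

worker = threading.Thread( target=consume )
worker.daemon = True
worker.start()

_queue.put( "some output" )
if _queue.empty():
    _queue.put( None )  # ensure a final wake-up and flush
_queue.join()           # returns once everything queued is processed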
Example #55
    def __init__( self, cuppa_env, location, develop=None, branch=None, extra_sub_path=None, name_hint=None ):

        logger.debug( "Create location using location=[{}], develop=[{}], branch=[{}], extra_sub_path=[{}], name_hint=[{}]".format(
                as_info( location ),
                as_info( str(develop) ),
                as_info( str(branch) ),
                as_info( str(extra_sub_path) ),
                as_info( str(name_hint) )
        ) )

        location = self.replace_sconstruct_anchor( location, cuppa_env )

        if develop:
            if not os.path.isabs( develop ):
                develop = '#' + develop
            develop = self.replace_sconstruct_anchor( develop, cuppa_env )
            logger.debug( "Develop location specified [{}]".format( as_info( develop ) ) )

        if 'develop' in cuppa_env and cuppa_env['develop'] and develop:
            location = develop
            logger.debug( "--develop specified so using location=develop=[{}]".format( as_info( develop ) ) )

        self._location   = os.path.expanduser( location )
        self._full_url   = urlparse.urlparse( self._location )
        self._sub_dir    = None
        self._name_hint  = name_hint

        self._expanded_location = None
        self._plain_location = ""

        if extra_sub_path:
            if os.path.isabs( extra_sub_path ):
                raise LocationException( "Error extra sub path [{}] is not relative".format(extra_sub_path) )
            else:
                self._sub_dir = os.path.normpath( extra_sub_path )

        ## Get the location for the source dependency. If the location is a URL or an Archive we'll need to
        ## retrieve the URL and extract the archive. get_local_directory() returns the location of the source
        ## once this is done
        local_directory = self.get_local_directory( cuppa_env, self._location, self._sub_dir, branch, self._full_url )

        logger.trace( "Local Directory for [{}] returned as [{}]".format(
                as_notice( self._location ),
                as_notice( local_directory )
        ) )

        self._base_local_directory = local_directory
        self._local_directory = self._sub_dir and os.path.join( local_directory, self._sub_dir ) or local_directory

        ## Now that we have a locally accessible version of the dependency we can try to collate some information
        ## about it to allow us to specify what we are building with.
        self._url, self._repository, self._branch, self._remote, self._revision = self.get_info( self._location, self._local_directory, self._full_url )
        self._version, self._revision = self.ver_rev_summary( self._branch, self._revision, self._full_url.path )

        logger.debug( "Using [{}]{}{} at [{}] stored in [{}]".format(
                as_info( location ),
                ( self._branch and ":[{}]".format( as_info( str(self._branch) ) ) or "" ),
                ( self._remote and " from [{}]".format( as_info( str(self._remote) ) ) or "" ),
                as_info( self._version ),
                as_notice( self._local_directory )
        ) )
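
The urlparse call above drives the later URL/path decisions; a quick illustration of what it yields for typical location strings (the module moved to urllib.parse in Python 3; the example locations are hypothetical):

try:
    from urllib.parse import urlparse  # Python 3
except ImportError:
    from urlparse import urlparse      # Python 2

for location in ( "http://example.com/boost_1_66_0.tar.gz",
                  "git+https://github.com/example/repo.git",
                  "/home/user/src/dependency" ):
    url = urlparse( location )
    print( "{} -> scheme=[{}] path=[{}]".format( location, url.scheme, url.path ) )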
Example #56
def get_boost_location( env, location, version, base, patched ):
    logger.debug( "Identify boost using location = [{}], version = [{}], base = [{}], patched = [{}]".format(
            as_info( str(location) ),
            as_info( str(version) ),
            as_info( str(base) ),
            as_info( str(patched) )
    ) )

    boost_home = None
    boost_location = None

    extra_sub_path = 'clean'
    if patched:
        extra_sub_path = 'patched'

    offline = env['offline']

    if location:
        location = _location_from_boost_version( location, offline )

        logger.trace( "Location after version detection = [{}]".format( as_notice( str(location) ) ) )

        if not location: # use version as a fallback in case both are specified
            location = _location_from_boost_version( version, offline )
        boost_location = cuppa.location.Location( env, location, extra_sub_path=extra_sub_path, name_hint="boost" )

    elif base: # Find boost locally
        if not os.path.isabs( base ):
            base = os.path.abspath( base )

        if not version:
            boost_home = base
        elif version:
            search_list = [
                os.path.join( base, 'boost', version, 'source' ),
                os.path.join( base, 'boost', 'boost_' + version ),
                os.path.join( base, 'boost', version ),
                os.path.join( base, 'boost_' + version ),
            ]

            def exists_in( locations ):
                for location in locations:
                    home = _home_from_path( location )
                    if home:
                        return home
                return None

            boost_home = exists_in( search_list )
            if not boost_home:
                raise BoostException("Cannot construct Boost Object. Home for Version [{}] cannot be found. Seached in [{}]".format(version, str([l for l in search_list])))
        else:
            raise BoostException("Cannot construct Boost Object. No Home or Version specified")

        logger.debug( "Using boost found at [{}]".format( as_info( boost_home ) ) )
        boost_location = cuppa.location.Location( env, boost_home, extra_sub_path=extra_sub_path )
    else:
        location = _location_from_boost_version( version, offline )
        boost_location = cuppa.location.Location( env, location, extra_sub_path=extra_sub_path )

    if patched:
        apply_patch_if_needed( boost_location.local(), get_boost_version( boost_location.local() )[0] )

    return boost_location
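
The `exists_in` helper above is a first-match search over candidate directories; the same thing can be written with `next` and a default (sketch only, with a stand-in for `_home_from_path`):

def find_home( locations, home_from_path ):
    # Return the first non-empty result of home_from_path, else None.
    return next( ( home for home in ( home_from_path( l ) for l in locations ) if home ), None )

# Hypothetical probe purely for illustration:
probe = lambda path: path.endswith( "source" ) and path or None
print( find_home( [ "base/boost/1_66", "base/boost/1_66/source" ], probe ) )  # base/boost/1_66/source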
Example #57
    def __init__( self,
                  sconstruct_path,
                  base_path            = os.path.abspath( '.' ),
                  branch_root          = None,
                  default_options      = {},
                  default_projects     = [],
                  default_variants     = [],
                  default_dependencies = [],
                  default_profiles     = [],
                  dependencies         = [],
                  profiles             = [],
                  default_runner       = None,
                  configure_callback   = None,
                  tools                = [] ):

        cuppa.core.base_options.set_base_options()

        cuppa_env = cuppa.core.environment.CuppaEnvironment()
        cuppa_env.add_tools( tools )

        dependencies, default_dependencies, dependencies_warning = self._normalise_with_defaults( dependencies, default_dependencies, "dependencies" )
        profiles, default_profiles, profiles_warning = self._normalise_with_defaults( profiles, default_profiles, "profiles" )

        self.initialise_options( cuppa_env, default_options, profiles, dependencies )
        cuppa_env['configured_options'] = {}
        self._configure = cuppa.configure.Configure( cuppa_env, callback=configure_callback )

        enable_thirdparty_logging( cuppa_env.get_option( 'enable-thirdparty-logging' ) and True or False )
        self._set_verbosity_level( cuppa_env )

        cuppa_env['sconstruct_path'] = sconstruct_path
        cuppa_env['sconstruct_dir'], cuppa_env['sconstruct_file'] = os.path.split(sconstruct_path)

        self._set_output_format( cuppa_env )

        self._configure.load()

        cuppa_env['offline'] = cuppa_env.get_option( 'offline' )

        cuppa.version.check_current_version( cuppa_env['offline'] )

        if cuppa_env['offline']:
            logger.info( as_info_label( "Running in OFFLINE mode" ) )

        logger.info( "using sconstruct file [{}]".format( as_notice( cuppa_env['sconstruct_file'] ) ) )

        if dependencies_warning:
            logger.warn( dependencies_warning )

        if profiles_warning:
            logger.warn( profiles_warning )

        help = cuppa_env.get_option( 'help' ) and True or False

        cuppa_env['minimal_output']       = cuppa_env.get_option( 'minimal_output' )
        cuppa_env['ignore_duplicates']    = cuppa_env.get_option( 'ignore_duplicates' )

        cuppa_env['working_dir']          = os.getcwd()
        cuppa_env['launch_dir']           = os.path.relpath( SCons.Script.GetLaunchDir(), cuppa_env['working_dir'] )
        cuppa_env['run_from_launch_dir']  = cuppa_env['launch_dir'] == "."

        cuppa_env['launch_offset_dir']    = "."

        if not cuppa_env['run_from_launch_dir']:
            levels = len( cuppa_env['launch_dir'].split( os.path.sep ) )
            cuppa_env['launch_offset_dir'] = os.path.sep.join( ['..' for i in range(levels)] )

        cuppa_env['base_path']   = os.path.normpath( os.path.expanduser( base_path ) )
        cuppa_env['branch_root'] = branch_root and os.path.normpath( os.path.expanduser( branch_root ) ) or base_path
        cuppa_env['branch_dir']  = cuppa_env['branch_root'] and os.path.relpath( cuppa_env['base_path'], cuppa_env['branch_root'] ) or None

        thirdparty = cuppa_env.get_option( 'thirdparty' )
        if thirdparty:
            thirdparty = os.path.normpath( os.path.expanduser( thirdparty ) )

        cuppa_env['thirdparty'] = thirdparty

        cuppa.core.storage_options.process_storage_options( cuppa_env )
        cuppa.core.location_options.process_location_options( cuppa_env )

        cuppa_env['default_projects']     = default_projects
        cuppa_env['default_variants']     = default_variants and set( default_variants ) or set()
        cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
        cuppa_env['BUILD_WITH']           = cuppa_env['default_dependencies']
        cuppa_env['dependencies']         = {}
        cuppa_env['default_profiles']     = default_profiles and default_profiles or []
        cuppa_env['BUILD_PROFILE']        = cuppa_env['default_profiles']
        cuppa_env['profiles']             = {}

        test_runner = cuppa_env.get_option( 'runner', default=default_runner and default_runner or 'process' )
        cuppa_env['default_runner']  = test_runner

        cuppa_env['propagate_env']       = cuppa_env.get_option( 'propagate-env' )       and True or False
        cuppa_env['propagate_path']      = cuppa_env.get_option( 'propagate-path' )      and True or False
        cuppa_env['merge_path']          = cuppa_env.get_option( 'merge-path' )          and True or False
        cuppa_env['show_test_output']    = cuppa_env.get_option( 'show-test-output' )    and True or False
        cuppa_env['suppress_process_output'] = cuppa_env.get_option( 'suppress-process-output' ) and True or False
        cuppa_env['dump']                = cuppa_env.get_option( 'dump' )                and True or False
        cuppa_env['clean']               = cuppa_env.get_option( 'clean' )               and True or False

        self.add_variants   ( cuppa_env )
        self.add_toolchains ( cuppa_env )
        self.add_platforms  ( cuppa_env )

        cuppa_env['platform'] = cuppa.build_platform.Platform.current()

        toolchains = cuppa_env.get_option( 'toolchains' )
        cuppa_env[ 'target_architectures' ] = None

        if not help and not self._configure.handle_conf_only():
            default_toolchain = cuppa_env['platform'].default_toolchain()

            if not toolchains:
                toolchains = [ cuppa_env[self.toolchains_key][default_toolchain] ]
            else:
                toolchains = [ cuppa_env[self.toolchains_key][t] for t in toolchains ]

            cuppa_env['active_toolchains'] = toolchains

            def add_profile( name, profile ):
                cuppa_env['profiles'][name] = profile

            def add_dependency( name, dependency ):
                cuppa_env['dependencies'][name] = dependency

            cuppa.modules.registration.get_options( "methods", cuppa_env )

            if not help and not self._configure.handle_conf_only():
                cuppa_env[self.project_generators_key] = {}
                cuppa.modules.registration.add_to_env( "dependencies",       cuppa_env, add_dependency )
                cuppa.modules.registration.add_to_env( "profiles",           cuppa_env, add_profile )
                cuppa.modules.registration.add_to_env( "methods",            cuppa_env )
                cuppa.modules.registration.add_to_env( "project_generators", cuppa_env )

                for method_plugin in pkg_resources.iter_entry_points( group='cuppa.method.plugins', name=None ):
                    method_plugin.load().add_to_env( cuppa_env )

                for profile_plugin in pkg_resources.iter_entry_points( group='cuppa.profile.plugins', name=None ):
                    profile_plugin.load().add_to_env( cuppa_env )

                if profiles:
                    for profile in profiles:
                        profile.add_to_env( cuppa_env, add_profile )

                logger.trace( "available profiles are [{}]".format(
                        colour_items( sorted( cuppa_env["profiles"].keys() ) )
                ) )

                logger.info( "default profiles are [{}]".format(
                        colour_items( sorted( cuppa_env["default_profiles"] ), as_info )
                ) )

                for dependency_plugin in pkg_resources.iter_entry_points( group='cuppa.dependency.plugins', name=None ):
                    dependency_plugin.load().add_to_env( cuppa_env, add_dependency )

                if dependencies:
                    for dependency in dependencies:
                        dependency.add_to_env( cuppa_env, add_dependency )


                logger.trace( "available dependencies are [{}]".format(
                        colour_items( sorted( cuppa_env["dependencies"].keys() ) )
                ) )

                logger.info( "default dependencies are [{}]".format(
                        colour_items( sorted( cuppa_env["default_dependencies"] ), as_info )
                ) )


            # TODO - default_profile

            if cuppa_env['dump']:
                logger.info( as_info_label( "Running in DUMP mode, no building will be attempted" ) )
                cuppa_env.dump()

            job_count = cuppa_env.get_option( 'num_jobs' )
            parallel  = cuppa_env.get_option( 'parallel' )
            parallel_mode = "manually"

            if job_count==1 and parallel:
                job_count = multiprocessing.cpu_count()
                if job_count > 1:
                    SCons.Script.SetOption( 'num_jobs', job_count )
                    parallel_mode = "automatically"
            cuppa_env['job_count'] = job_count
            cuppa_env['parallel']  = parallel
            if job_count>1:
                logger.debug( "Running in {} with option [{}] set {} as [{}]".format(
                        as_emphasised("parallel mode"),
                        as_info( "jobs" ),
                        as_emphasised(parallel_mode),
                        as_info( str( SCons.Script.GetOption( 'num_jobs') ) )
                ) )

        if not help and self._configure.handle_conf_only():
            self._configure.save()

        if not help and not self._configure.handle_conf_only():
            self.build( cuppa_env )

        if self._configure.handle_conf_only():
            print "cuppa: Handling configuration only, so no builds will be attempted."
            print "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
            print ""
            print "scons -D {}".format( self._command_line_from_settings( cuppa_env['configured_options'] ) )
            print ""
            print "cuppa: Nothing to be done. Exiting."
            SCons.Script.Exit()
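
Finally, the job-count handling near the end of the constructor only promotes to all cores when num_jobs was left at its default of 1 and --parallel was requested; distilled (values illustrative):

import multiprocessing

def effective_job_count( num_jobs, parallel ):
    if num_jobs == 1 and parallel:
        detected = multiprocessing.cpu_count()
        if detected > 1:
            return detected
    return num_jobs

print( effective_job_count( 1, True ) )   # e.g. 8 on an eight-core machine
print( effective_job_count( 4, True ) )   # 4 - an explicit setting wins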