Ejemplo n.º 1
0
def _lazy_update_library_list(env, emitting, libraries, prebuilt_libraries,
                              add_dependents, linktype, boost, stage_dir):
    """Return the subset of ``libraries`` still needing building for
    ``stage_dir``, updating the ``prebuilt_libraries`` cache as a side effect.

    When ``add_dependents`` is true the requested libraries are first
    expanded with their Boost dependencies. The first time a stage dir is
    seen all of its libraries are recorded and returned; on later calls only
    libraries not already recorded are returned (and then added to the
    cache).
    """
    def build_with_library_name(library):
        # 'log_setup' is built as part of the 'log' library build.
        # (Replaces the fragile `x and a or b` pseudo-ternary.)
        return 'log' if library == 'log_setup' else library

    if add_dependents:
        if not emitting:
            libraries = set(
                build_with_library_name(l)
                for l in add_dependent_libraries(boost, linktype, libraries))
        else:
            libraries = add_dependent_libraries(boost, linktype, libraries)

    if stage_dir not in prebuilt_libraries:
        logger.trace("Lazy update libraries list for [{}] to [{}]".format(
            as_info(stage_dir), colour_items(str(l) for l in libraries)))
        prebuilt_libraries[stage_dir] = set(libraries)
    else:
        logger.trace(
            "Lazy read libraries list for [{}]: libraries are [{}]".format(
                as_info(stage_dir), colour_items(str(l) for l in libraries)))
        # Only the libraries not already built for this stage dir remain.
        libraries = [
            l for l in libraries if l not in prebuilt_libraries[stage_dir]
        ]
        prebuilt_libraries[stage_dir].update(libraries)

    return libraries
Ejemplo n.º 2
0
    def __call__( self, target, source, env ):
        # SCons emitter: extend `target` with the copied report locations so
        # the copy action re-runs when the reports change.
        destination = self._destination
        if not destination:
            destination = env['abs_final_dir']
        else:
            destination = self._destination + destination_subdir( env )

        # NOTE(review): master_index and master_report refer to the *same*
        # file ("test-report-index.json") — the index was presumably meant to
        # be a different (.html?) file; confirm against ReportIndexBuilder.
        master_index = env.File( os.path.join( self._destination, "test-report-index.json" ) )
        master_report = env.File( os.path.join( self._destination, "test-report-index.json" ) )

        env.Clean( source, master_index )
        env.Clean( source, master_report )

        ReportIndexBuilder.register_report_folders( final_dir=env['abs_final_dir'], destination_dir=self._destination )

        # Sources arrive as interleaved (html, json) report pairs.
        for html_report, json_report in zip(*[iter(source)]*2):
            target.append( os.path.join( destination, os.path.split( str(html_report) )[1] ) )
            json_report_target = env.File( os.path.join( destination, os.path.split( str(json_report) )[1] ) )
            target.append( json_report_target )
            ReportIndexBuilder.update_index( json_report_target, os.path.split(json_report_target.abspath)[0] )

        logger.trace( "sources = [{}]".format( colour_items( [str(s) for s in source] ) ) )
        logger.trace( "targets = [{}]".format( colour_items( [str(t) for t in target] ) ) )

        # The master index/report depend on every emitted target.
        env.Depends( master_report, target )
        env.Depends( master_index, target )

        return target, source
Ejemplo n.º 3
0
    def __init__( self, env, stage_dir, libraries, add_dependents, linktype, boost, verbose_build, verbose_config ):
        """Record the build configuration and determine which libraries this
        action still has to build for the given stage directory."""
        self._env = env

        # One cache of already-built variants per sconstruct file.
        global _prebuilt_boost_libraries
        sconstruct_id = env['sconstruct_path']
        if sconstruct_id not in _prebuilt_boost_libraries['action']:
            _prebuilt_boost_libraries['action'][sconstruct_id] = {}

        logger.trace( "Current Boost build [{}] has the following build variants [{}]".format( as_info(sconstruct_id), colour_items(_prebuilt_boost_libraries['action'][sconstruct_id].keys()) ) )
        logger.debug( "Requested libraries [{}]".format( colour_items( libraries ) ) )

        self._linktype = linktype
        self._variant = variant_name( self._env['variant'].name() )
        self._target_arch = env['target_arch']
        self._toolchain = env['toolchain']
        self._stage_dir = stage_dir

        # Trim the request to libraries not yet built for this stage dir.
        self._libraries = _lazy_update_library_list(
            env, False, libraries,
            _prebuilt_boost_libraries['action'][sconstruct_id],
            add_dependents, linktype, boost, self._stage_dir )

        logger.debug( "Required libraries [{}]".format( colour_items( self._libraries ) ) )

        self._location = boost.local()
        self._verbose_build = verbose_build
        self._verbose_config = verbose_config
        self._job_count = env['job_count']
        self._parallel = env['parallel']
        self._threading = True
Ejemplo n.º 4
0
    def __init__( self, env, stage_dir, libraries, add_dependents, linktype, boost ):
        """Work out which Boost libraries this emitter still needs to emit
        for the stage directory and record the build configuration."""
        self._env = env

        # Per-sconstruct cache of variants already emitted.
        global _prebuilt_boost_libraries
        sconstruct_id = env['sconstruct_path']
        if sconstruct_id not in _prebuilt_boost_libraries['emitter']:
            _prebuilt_boost_libraries['emitter'][sconstruct_id] = {}

        logger.trace( "Current Boost build [{}] has the following build variants [{}]".format( as_info(sconstruct_id), colour_items(_prebuilt_boost_libraries['emitter'][sconstruct_id].keys()) ) )

        self._stage_dir = stage_dir

        logger.debug( "Requested libraries [{}]".format( colour_items( libraries ) ) )

        # Reduce the request to libraries not yet handled for this stage dir.
        self._libraries = _lazy_update_library_list(
            env, True, libraries,
            _prebuilt_boost_libraries['emitter'][sconstruct_id],
            add_dependents, linktype, boost, self._stage_dir )

        logger.debug( "Required libraries [{}]".format( colour_items( self._libraries ) ) )

        self._location = boost.local()
        self._boost = boost
        self._threading = True
        self._linktype = linktype
        self._variant = variant_name( self._env['variant'].name() )
        self._toolchain = env['toolchain']
Ejemplo n.º 5
0
    def __call__(self, target, source, env):
        """Run gcov over each source/target pair.

        Returns None so SCons records the action as successful.
        """
        logger.trace("target = {}".format(
            colour_items([str(t) for t in target])))
        logger.trace("source = {}".format(
            colour_items([str(s) for s in source])))

        cuppa.path.lazy_create_path(
            os.path.join(env['base_path'], env['build_dir']))

        self._target = target

        # Each source will result in one or more targets so we need to slice the targets to pick up
        # the gcov target (the first one) before we perform the zip iteration
        for s, t in zip(
                source,
                itertools.islice(target, 0, None,
                                 len(target) // len(source))):

            # Strip two extensions from the gcov target to get the base path.
            gcov_path = os.path.splitext(os.path.splitext(t.path)[0])[0]
            gcov_log = t.path
            logger.trace("gcov_path = [{}]".format(as_notice(str(gcov_path))))
            self._run_gcov(env, s.path, gcov_path, gcov_log)

        # Fix: removed the original `target = self._target` rebinding — a
        # dead local assignment with no effect, as the action returns None.
        return None
Ejemplo n.º 6
0
    def __init__(self, env, stage_dir, libraries, add_dependents, linktype,
                 boost, verbose_build, verbose_config):
        """Record the build configuration and determine which libraries this
        build action must produce for the stage directory."""
        self._env = env

        logger.trace("Requested libraries [{}]".format(
            colour_items(libraries)))

        self._stage_dir = stage_dir
        self._linktype = linktype
        self._toolchain = env['toolchain']
        self._target_arch = env['target_arch']
        self._variant = variant_name(self._env['variant'].name())

        # Reduce the request to libraries not yet built for this stage dir.
        self._libraries = _lazy_update_library_list(
            env, False, libraries, self.prebuilt_libraries, add_dependents,
            linktype, boost, self._stage_dir)

        logger.trace("Required libraries [{}]".format(
            colour_items(self._libraries)))

        self._location = boost.local()
        self._parallel = env['parallel']
        self._job_count = env['job_count']
        self._verbose_build = verbose_build
        self._verbose_config = verbose_config
Ejemplo n.º 7
0
    def __call__( self, target, source, env ):
        """Copy each html/json report pair into place.

        Returns None so SCons records the action as successful.
        """
        logger.trace( "target = [{}]".format( colour_items( [ str(node) for node in target ] ) ) )
        logger.trace( "source = [{}]".format( colour_items( [ str(node) for node in source ] ) ) )

        # Fix: itertools.izip is Python 2 only; the builtin zip behaves the
        # same here on both major versions. Pairs each source with its
        # target, then groups those pairs as (html, json) report pairs.
        for html_report_src_tgt, json_report_src_tgt in zip(*[iter(zip( source, target ))]*2):

            html_report = html_report_src_tgt[0]
            json_report = json_report_src_tgt[0]

            html_target = html_report_src_tgt[1]
            json_target = json_report_src_tgt[1]

            logger.trace( "html_report = [{}]".format( as_notice( str(html_report) ) ) )
            logger.trace( "json_report = [{}]".format( as_info( str(json_report) ) ) )
            logger.trace( "html_target = [{}]".format( as_notice( str(html_target) ) ) )
            logger.trace( "json_target = [{}]".format( as_info( str(json_target) ) ) )

            # Destination is computed but currently unused — copies go to
            # the emitter-provided targets.
            destination = env['abs_final_dir']
            if  self._destination:
                destination = self._destination + destination_subdir( env )

            logger.trace( "report_summary = {}".format( str( self._read( str(json_report) ) ) ) )

            env.Execute( Copy( html_target, html_report ) )
            env.Execute( Copy( json_target, json_report ) )

        return None
Ejemplo n.º 8
0
Archivo: git.py Proyecto: j0nnyw/cuppa
    def get_branch(cls, path):
        """Return a (branch, remote) pair for the git checkout at `path`.

        Runs `git show -s --pretty=%d --decorate=full HEAD` so a branch can
        be recovered even from a detached HEAD. Each decoration ref is
        classified as local (L), tag (T), remote (R) or unknown (U); the
        first ref decides the branch and the first remote ref (if any)
        supplies `remote`. Both values may be None.
        """
        branch = None
        remote = None

        # In case we have a detached head we use this.
        # Fix: raw string — "\%" is an invalid escape sequence in a normal
        # string literal (SyntaxWarning on recent Pythons); the command
        # bytes are unchanged.
        result = cls.execute_command(
            r"{git} show -s --pretty=\%d --decorate=full HEAD".format(
                git=cls.binary()), path)

        match = re.search(r'HEAD(?:(?:[^ ]* -> |[^,]*, )(?P<refs>[^)]+))?',
                          result)

        if match:
            refs = [{
                "ref": r.strip(),
                "type": ""
            } for r in match.group("refs").split(',')]
            logger.trace("Refs (using show) for [{}] are [{}]".format(
                as_notice(path), colour_items((r["ref"] for r in refs))))
            if refs:
                for ref in refs:
                    if ref["ref"].startswith("refs/heads/"):
                        ref["ref"] = ref["ref"][len("refs/heads/"):]
                        ref["type"] = "L"
                    elif ref["ref"].startswith("refs/tags/"):
                        ref["ref"] = ref["ref"][len("refs/tags/"):]
                        ref["type"] = "T"
                    elif ref["ref"].startswith("refs/remotes/"):
                        ref["ref"] = ref["ref"][len("refs/remotes/"):]
                        ref["type"] = "R"
                    else:
                        ref["type"] = "U"

                logger.trace(
                    "Refs (after classification) for [{}] are [{}]".format(
                        as_notice(path),
                        colour_items(
                            (":".join([r["type"], r["ref"]]) for r in refs))))

                if refs[0]["type"] == "L":
                    branch = refs[0]["ref"]
                elif refs[0]["type"] == "T":
                    branch = refs[0]["ref"]
                elif refs[0]["type"] == "R":
                    # Remote ref: drop the remote name, keep the branch part.
                    branch = refs[0]["ref"].split('/')[1]

                remote = next(
                    (ref["ref"] for ref in refs if ref["type"] == "R"), None)

            logger.trace("Branch (using show) for [{}] is [{}]".format(
                as_notice(path), as_info(str(branch))))
        else:
            logger.warn("No branch found from [{}]".format(result))

        return branch, remote
Ejemplo n.º 9
0
    def __call__( self, env, pattern, start=default, exclude_dirs=default ):
        # Recursively glob for `pattern` under `start` and return SCons File
        # nodes, made relative to the sconscript dir when safe to do so.

        base_path = os.path.realpath( env['sconscript_dir'] )

        if start == self.default:
            start = base_path

        start = os.path.expanduser( start )

        rel_start = os.path.relpath( base_path, start )

        logger.trace(
            "paths: start = [{}], base_path = [{}], rel_start = [{}]"
            .format( as_notice( start ), as_notice( base_path ), as_notice( rel_start ) )
        )

        if not os.path.isabs( start ):
            start = rel_start

        # By default do not descend into the download or build trees.
        if exclude_dirs == self.default:
            exclude_dirs = [ env['download_root'], env['build_root' ] ]

        exclude_dirs_regex = None

        if exclude_dirs:
            def up_dir( path ):
                # True when the first non-empty path element is "..".
                element = next( e for e in path.split(os.path.sep) if e )
                return element == ".."
            # Only relative, non-upward paths are usable as exclusions.
            exclude_dirs = [ re.escape(d) for d in exclude_dirs if not os.path.isabs(d) and not up_dir(d) ]
            exclude_dirs = "|".join( exclude_dirs )
            exclude_dirs_regex = re.compile( exclude_dirs )

        matches = cuppa.recursive_glob.glob( start, pattern, exclude_dirs_pattern=exclude_dirs_regex )

        logger.trace(
            "matches = [{}]."
            .format( colour_items( [ str(match) for match in matches ] ) )
        )

        # Paths can only be made relative when start lies inside base_path.
        make_relative = True
        if rel_start.startswith( os.pardir ):
            make_relative = False

        logger.trace( "make_relative = [{}].".format( as_notice( str(make_relative) ) ) )

        nodes = [ env.File( make_relative and os.path.relpath( match, base_path ) or match ) for match in matches ]

        logger.trace(
            "nodes = [{}]."
            .format( colour_items( [ str(node) for node in nodes ] ) )
        )

        return nodes
Ejemplo n.º 10
0
    def get_active_actions( self, cuppa_env, current_variant, active_variants, active_actions ):
        """Return the actions that should run for `current_variant`.

        Actions come from explicit command-line options (or the already
        active actions), falling back to actions named after the active
        variants. An action sharing a name with a variant is only kept when
        processing that variant.
        """
        available_variants = cuppa_env[ self.variants_key ]
        available_actions  = cuppa_env[ self.actions_key ]
        specified_actions  = {}

        for key, action in available_actions.items():
            if cuppa_env.get_option( action.name() ) or action.name() in active_actions:
                specified_actions[ action.name() ] = action

        if not specified_actions:
            if active_variants:
                # Fix: `dict.has_key` was removed in Python 3 — use `in`.
                # The loop variable is also renamed so it no longer shadows
                # the module-level `variant_name` helper.
                for variant in active_variants:
                    if variant in available_actions:
                        specified_actions[ variant ] = available_actions[ variant ]

        active_actions = {}

        for key, action in specified_actions.items():
            if key not in available_variants:
                active_actions[ key ] = action
            elif key == current_variant.name():
                active_actions[ key ] = action

        logger.debug( "Specifying active_actions of [{}] for variant [{}]".format( colour_items( specified_actions, as_info ), current_variant.name() ) )

        return active_actions
Ejemplo n.º 11
0
    def get_branch(cls, path):
        """Return the branch (or tag name) for the git checkout at `path`.

        Tries `git symbolic-ref` first; for a detached HEAD falls back to
        parsing the decorations of `git show`. Returns None when no branch
        can be determined.
        """
        branch = None
        try:
            result = cls.execute_command(
                "{git} symbolic-ref HEAD".format(git=cls.binary()), path)
            branch = result.replace("refs/heads/", "").strip()
            logger.trace("Branch (using symbolic-ref) for [{}] is [{}]".format(
                as_notice(path), as_info(branch)))
            return branch
        except cls.Error:
            pass

        # In case we have a detached head we can fallback to this.
        # Fix: raw string — "\%" is an invalid escape sequence in a normal
        # string literal (SyntaxWarning on recent Pythons); the command
        # bytes are unchanged.
        result = cls.execute_command(
            r"{git} show -s --pretty=\%d HEAD".format(git=cls.binary()), path)
        match = re.search(r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result)
        if match:
            branches = [b.strip() for b in match.group("branches").split(',')]
            logger.trace("Branches (using show) for [{}] are [{}]".format(
                as_notice(path), colour_items(branches)))
            if len(branches) == 1:
                # If this returns a tag: tag_name replace the ": " with "/" and then extract the tag_name
                # otherwise this will simply extract the branch_name as expected
                branch = branches[0].replace(': ', '/').split('/')[1]
            else:
                branch = branches[-2].split('/')[1]
            logger.trace("Branch (using show) for [{}] is [{}]".format(
                as_notice(path), as_info(branch)))
        else:
            logger.warn("No branch found from [{}]".format(result))

        return branch
Ejemplo n.º 12
0
    def get_branch(cls, path):
        """Return a (branch, remote) pair for the git checkout at `path`.

        Parses the decorations of `git show` so a branch can be recovered
        even from a detached HEAD. Either value may be None.
        """
        branch = None
        remote = None

        # In case we have a detached head we use this.
        # Fix: raw string — "\%" is an invalid escape sequence in a normal
        # string literal (SyntaxWarning on recent Pythons); the command
        # bytes are unchanged.
        result = as_str(
            cls.execute_command(
                r"{git} show -s --pretty=\%d HEAD".format(git=cls.binary()),
                path))
        match = re.search(r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result)
        if match:
            branches = [b.strip() for b in match.group("branches").split(',')]
            logger.trace("Branches (using show) for [{}] are [{}]".format(
                as_notice(path), colour_items(branches)))
            if len(branches) == 1:
                # If this returns a tag: tag_name replace the ": " with "/" and then extract the tag_name
                # otherwise this will simply extract the branch_name as expected
                if not branches[0].startswith('tag:'):
                    remote = branches[0]
                branch = branches[0].replace(': ', '/').split('/')[1]
            else:
                remote = branches[-2]
                branch = remote.split('/')[1]
            logger.trace("Branch (using show) for [{}] is [{}]".format(
                as_notice(path), as_info(branch)))
        else:
            logger.warn("No branch found from [{}]".format(result))

        return branch, remote
Ejemplo n.º 13
0
    def get_active_actions(self, cuppa_env, current_variant, active_variants,
                           active_actions):
        """Return the actions that should run for `current_variant`, chosen
        from explicitly requested actions or the active variants."""
        available_variants = cuppa_env[self.variants_key]
        available_actions = cuppa_env[self.actions_key]

        # Actions requested on the command line or already marked active.
        specified_actions = {}
        for action in available_actions.values():
            name = action.name()
            if cuppa_env.get_option(name) or name in active_actions:
                specified_actions[name] = action

        # Fall back to actions named after the active variants.
        if not specified_actions and active_variants:
            for variant in active_variants:
                if variant in available_actions:
                    specified_actions[variant] = available_actions[variant]

        # An action that is also a variant only applies to its own variant.
        active_actions = {
            key: action
            for key, action in specified_actions.items()
            if key not in available_variants or key == current_variant.name()
        }

        logger.debug(
            "Specifying active_actions of [{}] for variant [{}]".format(
                colour_items(specified_actions, as_info),
                current_variant.name()))

        return active_actions
Ejemplo n.º 14
0
def add_dependent_libraries(boost, linktype, libraries):
    """Expand `libraries` with the Boost libraries they depend on.

    Returns the expanded list in dependency order, with libraries the
    dependency table does not know about appended at the end. `boost`
    supplies the numeric version (dependencies changed across releases) and
    whether the patched Boost.Test is in use; `linktype` affects the
    coroutine dependencies.
    """
    version = boost.numeric_version()
    patched_test = boost._patched_test
    required_libraries = set(libraries)

    logger.trace("Required Library Set = [{}]".format(
        colour_items([l for l in required_libraries])))

    for library in libraries:
        if library in boost_libraries_with_no_dependencies():
            continue
        elif library == 'chrono':
            required_libraries.update(['system'])
        elif library == 'coroutine':
            required_libraries.update(['context', 'system'])
            if version > 1.55:
                required_libraries.update(['thread'])
            if linktype == 'shared':
                required_libraries.update(['chrono'])
        elif library == 'filesystem':
            required_libraries.update(['system'])
        elif library == 'graph':
            required_libraries.update(['regex'])
        elif library == 'locale':
            required_libraries.update(['filesystem', 'system', 'thread'])
        elif library == 'log':
            required_libraries.update(
                ['date_time', 'filesystem', 'system', 'thread'])
        elif library == 'log_setup':
            required_libraries.update(
                ['log', 'date_time', 'filesystem', 'system', 'thread'])
        elif library in {
                'test', 'prg_exec_monitor', 'test_exec_monitor',
                'unit_test_framework'
        }:
            if library == 'test' and 'test' in required_libraries:
                # 'test' is an alias for the unit test framework library.
                required_libraries.remove('test')
                required_libraries.update(['unit_test_framework'])
            if patched_test:
                # Fix: the original passed the single malformed string
                # 'timer, chrono' instead of the two separate libraries.
                required_libraries.update(['timer', 'chrono', 'system'])
        elif library == 'timer':
            required_libraries.update(['chrono', 'system'])

    # Emit known libraries in dependency order first...
    libraries = []
    for library in boost_dependency_order():
        if library in required_libraries:
            libraries.append(library)

    # ...then any libraries the dependency table does not know about.
    for library in required_libraries:
        if library not in boost_dependency_set():
            libraries.append(library)

    return libraries
Ejemplo n.º 15
0
    def add_toolchains( self, env ):
        # Register all available/supported toolchains with the environment
        # and expose a --toolchains command-line option for selecting them.
        toolchains = self.toolchains_key
        cuppa.modules.registration.add_to_env( toolchains, env, env.add_available_toolchain, env.add_supported_toolchain )

        logger.trace( "supported toolchains are [{}]".format(
                colour_items( env["supported_toolchains"] )
        ) )
        logger.info( "available toolchains are [{}]".format(
                colour_items( sorted( env[toolchains].keys(), reverse=True ), as_info )
        ) )

        # The option value is parsed by a callback so wildcards can be
        # expanded against the known toolchain names.
        SCons.Script.AddOption(
            '--toolchains',
            type     = 'string',
            nargs    = 1,
            action   = 'callback',
            callback = ParseToolchainsOption( env['supported_toolchains'], env[toolchains].keys() ),
            help     = 'The Toolchains you wish to build against. A comma separate list with wildcards'
                       ' may be provided. For example --toolchains=gcc*,clang37,clang36'
        )
Ejemplo n.º 16
0
    def __init__(self, env, stage_dir, libraries, add_dependents, linktype,
                 boost, verbose_build, verbose_config):
        """Capture the boost build settings and resolve which libraries this
        action still has to build for the stage directory."""
        self._env = env

        # One cache of already-built variants per sconstruct file.
        global _prebuilt_boost_libraries
        sconstruct_id = env['sconstruct_path']
        _prebuilt_boost_libraries['action'].setdefault(sconstruct_id, {})

        variants = _prebuilt_boost_libraries['action'][sconstruct_id]

        logger.trace(
            "Current Boost build [{}] has the following build variants [{}]".
            format(as_info(sconstruct_id), colour_items(variants.keys())))

        logger.debug("Requested libraries [{}]".format(
            colour_items(libraries)))

        self._linktype = linktype
        self._variant = variant_name(self._env['variant'].name())
        self._target_arch = env['target_arch']
        self._toolchain = env['toolchain']
        self._stage_dir = stage_dir

        # Keep only the libraries not already built for this stage dir.
        self._libraries = _lazy_update_library_list(
            env, False, libraries, variants, add_dependents, linktype, boost,
            self._stage_dir)

        logger.debug("Required libraries [{}]".format(
            colour_items(self._libraries)))

        self._location = boost.local()
        self._verbose_build = verbose_build
        self._verbose_config = verbose_config
        self._job_count = env['job_count']
        self._parallel = env['parallel']
        self._threading = True
Ejemplo n.º 17
0
    def __call__(self, target, source, env):
        """Emitter: extend `target` with the copied report locations and
        register the report folders with ReportIndexBuilder."""
        if self._destination:
            destination = self._destination + destination_subdir(env)
        else:
            destination = env['abs_final_dir']

        # NOTE(review): both nodes refer to the same json file; the index was
        # perhaps intended to be a different (.html?) file — confirm.
        index_path = os.path.join(self._destination, "test-report-index.json")
        master_index = env.File(index_path)
        master_report = env.File(index_path)

        env.Clean(source, master_index)
        env.Clean(source, master_report)

        ReportIndexBuilder.register_report_folders(
            final_dir=env['abs_final_dir'], destination_dir=self._destination)

        # Sources arrive as interleaved (html, json) report pairs.
        for html_report, json_report in zip(*[iter(source)] * 2):
            html_name = os.path.split(str(html_report))[1]
            json_name = os.path.split(str(json_report))[1]
            target.append(os.path.join(destination, html_name))
            json_report_target = env.File(os.path.join(destination, json_name))
            target.append(json_report_target)
            ReportIndexBuilder.update_index(
                json_report_target,
                os.path.split(json_report_target.abspath)[0])

        logger.trace("sources = [{}]".format(
            colour_items([str(s) for s in source])))
        logger.trace("targets = [{}]".format(
            colour_items([str(t) for t in target])))

        env.Depends(master_report, target)
        env.Depends(master_index, target)

        return target, source
Ejemplo n.º 18
0
    def __call__( self, env, pattern, start=default, exclude_dirs=default ):
        """Recursively glob for `pattern` under `start`, returning SCons File
        nodes made relative to the sconscript dir when safe to do so."""
        start, rel_start, base_path = relative_start( env, start, self.default )

        # By default do not descend into the download or build trees.
        if exclude_dirs == self.default:
            exclude_dirs = [ env['download_root'], env['build_root'] ]

        exclude_dirs_regex = None
        if exclude_dirs:
            def escapes_upward( d ):
                # True when the first non-empty path element is "..".
                first = next( e for e in d.split(os.path.sep) if e )
                return first == ".."
            # Only relative, non-upward paths are usable as exclusions.
            usable = [ re.escape(d) for d in exclude_dirs
                       if not os.path.isabs(d) and not escapes_upward(d) ]
            exclude_dirs_regex = re.compile( "|".join( usable ) )

        matches = cuppa.recursive_glob.glob( start, pattern, exclude_dirs_pattern=exclude_dirs_regex )

        logger.trace(
            "matches = [{}]."
            .format( colour_items( [ str(m) for m in matches ] ) )
        )

        # Paths can only be made relative when start lies inside base_path.
        make_relative = not rel_start.startswith( os.pardir )

        logger.trace( "make_relative = [{}].".format( as_notice( str(make_relative) ) ) )

        nodes = []
        for match in matches:
            path = os.path.relpath( match, base_path ) if make_relative else match
            nodes.append( env.File( path ) )

        logger.trace(
            "nodes = [{}]."
            .format( colour_items( [ str(node) for node in nodes ] ) )
        )

        return nodes
Ejemplo n.º 19
0
    def __init__( self, env, stage_dir, libraries, add_dependents, linktype, boost, verbose_build, verbose_config ):
        # Capture the boost build configuration and resolve the libraries
        # that still need building for `stage_dir`.

        self._env = env

        logger.trace( "Requested libraries [{}]".format( colour_items( libraries ) ) )

        self._linktype       = linktype
        self._variant        = variant_name( self._env['variant'].name() )
        self._target_arch    = env['target_arch']
        self._toolchain      = env['toolchain']
        self._stage_dir      = stage_dir

        # NOTE(review): `lazy_update_library_list` (no leading underscore)
        # and `self._built_libraries` are not defined in this block —
        # presumably a module-level helper and a class-level cache; confirm
        # both exist in this module/class.
        self._libraries = lazy_update_library_list( env, False, libraries, self._built_libraries, add_dependents, linktype, boost, self._stage_dir )

        logger.trace( "Required libraries [{}]".format( colour_items( self._libraries ) ) )

        self._location       = boost.local()
        self._version        = boost.numeric_version()
        self._full_version   = boost.full_version()
        self._verbose_build  = verbose_build
        self._verbose_config = verbose_config
        self._job_count      = env['job_count']
        self._parallel       = env['parallel']
Ejemplo n.º 20
0
    def __call__(self, target, source, env):
        """Copy each html/json report pair to its corresponding target.
        Returns None so SCons records the action as successful."""
        logger.trace("target = [{}]".format(
            colour_items([str(node) for node in target])))
        logger.trace("source = [{}]".format(
            colour_items([str(node) for node in source])))

        # Pair each source with its target, then group the pairs two at a
        # time: first the html report pair, then the json report pair.
        paired = iter(zip(source, target))
        for (html_report, html_target), (json_report, json_target) in zip(
                paired, paired):

            logger.trace("html_report = [{}]".format(
                as_notice(str(html_report))))
            logger.trace("json_report = [{}]".format(as_info(
                str(json_report))))
            logger.trace("html_target = [{}]".format(
                as_notice(str(html_target))))
            logger.trace("json_target = [{}]".format(as_info(
                str(json_target))))

            # TODO: Check use of destination as it is currently unused
            destination = env['abs_final_dir']
            if self._destination:
                destination = self._destination + destination_subdir(env)

            logger.trace("report_summary = {}".format(
                str(self._read(str(json_report)))))

            env.Execute(Copy(html_target, html_report))
            env.Execute(Copy(json_target, json_report))

        return None
Ejemplo n.º 21
0
    def __call__(self, target, source, env):
        # SCons emitter: add the per-variant coverage index and summary files
        # to `target` and register the coverage folders for later collation.
        destination = self._destination
        if not destination:
            destination = env['abs_final_dir']
            # NOTE(review): this branch still joins against
            # self._destination, which is falsy here — it looks like it
            # should use `destination` instead; confirm the intended path.
            env.Clean(source,
                      os.path.join(self._destination, "coverage-index.html"))
        else:
            env.Clean(source,
                      os.path.join(self._destination, "coverage-index.html"))
            destination = self._destination + destination_subdir(env)

        # The ".cov_files" source node marks that coverage data was produced.
        files_node = next(
            (s for s in source if os.path.splitext(str(s))[1] == ".cov_files"),
            None)
        if files_node:
            variant_index_file = os.path.join(env['abs_final_dir'],
                                              coverage_index_name_from(env))
            target.append(variant_index_file)
            env.Clean(
                source,
                os.path.join(destination,
                             os.path.split(variant_index_file)[1]))

            # The summary log sits beside the index file.
            variant_summary_file = os.path.splitext(
                variant_index_file)[0] + ".log"
            target.append(variant_summary_file)

            CoverageIndexBuilder.register_coverage_folders(
                final_dir=env['abs_final_dir'],
                destination_dir=self._destination)

        logger.trace("sources = [{}]".format(
            colour_items([str(s) for s in source])))
        logger.trace("targets = [{}]".format(
            colour_items([str(t) for t in target])))

        return target, source
Ejemplo n.º 22
0
    def __init__(self, env, stage_dir, libraries, add_dependents, linktype,
                 boost):
        """Determine which libraries this emitter still needs to handle for
        the stage directory and record the build configuration."""
        self._env = env

        # One cache of emitted variants per sconstruct file.
        global _prebuilt_boost_libraries
        sconstruct_id = env['sconstruct_path']
        _prebuilt_boost_libraries['emitter'].setdefault(sconstruct_id, {})

        variants = _prebuilt_boost_libraries['emitter'][sconstruct_id]

        logger.trace(
            "Current Boost build [{}] has the following build variants [{}]".
            format(as_info(sconstruct_id), colour_items(variants.keys())))

        self._stage_dir = stage_dir

        logger.debug("Requested libraries [{}]".format(
            colour_items(libraries)))

        # Keep only libraries not already emitted for this stage dir.
        self._libraries = _lazy_update_library_list(
            env, True, libraries, variants, add_dependents, linktype, boost,
            self._stage_dir)

        logger.debug("Required libraries [{}]".format(
            colour_items(self._libraries)))

        self._location = boost.local()
        self._boost = boost
        self._threading = True
        self._linktype = linktype
        self._variant = variant_name(self._env['variant'].name())
        self._toolchain = env['toolchain']
Ejemplo n.º 23
0
def lazy_update_library_list(env, emitting, libraries, built_libraries,
                             add_dependents, linktype, boost, stage_dir):
    """Return the subset of `libraries` not yet built for `stage_dir`,
    recording newly seen stage dirs in the `built_libraries` cache.

    When `add_dependents` is true the requested libraries are first
    expanded with their Boost dependencies.
    """
    if add_dependents:
        if not emitting:
            # NOTE(review): `build_with_library_name` is not defined in this
            # block — presumably a module-level helper mapping 'log_setup'
            # to 'log'; confirm it exists at module scope.
            libraries = set(
                build_with_library_name(l)
                for l in add_dependent_libraries(boost, linktype, libraries))
        else:
            libraries = add_dependent_libraries(boost, linktype, libraries)

    # Idiom fix: `x not in y` rather than `not x in y`.
    if stage_dir not in built_libraries:
        logger.debug("Lazy update libraries list for [{}] to [{}]".format(
            as_info(stage_dir), colour_items(str(l) for l in libraries)))
        built_libraries[stage_dir] = set(libraries)
    else:
        logger.debug(
            "Lazy read libraries list for [{}]: libraries are [{}]".format(
                as_info(stage_dir), colour_items(str(l) for l in libraries)))
        # Only libraries not already built for this stage dir remain.
        libraries = [
            l for l in libraries if l not in built_libraries[stage_dir]
        ]

    return libraries
Ejemplo n.º 24
0
def add_dependent_libraries( boost, linktype, libraries ):
    """Expand *libraries* with the Boost libraries each one depends on.

    The result is ordered: libraries known to boost_dependency_order() come
    first, in that order, followed by any remaining requested libraries.

    :param boost:     Boost dependency object; provides numeric_version()
                      and the _patched_test flag.
    :param linktype:  'static' or 'shared' — shared coroutine builds also
                      need chrono.
    :param libraries: iterable of requested Boost library names.
    :returns: ordered list of libraries including all dependencies.
    """
    version = boost.numeric_version()
    patched_test = boost._patched_test
    required_libraries = set( libraries )

    logger.trace( "Required Library Set = [{}]".format( colour_items( required_libraries ) ) )

    for library in libraries:
        if library in boost_libraries_with_no_dependencies():
            continue
        elif library == 'chrono':
            required_libraries.update( ['system'] )
        elif library == 'coroutine':
            required_libraries.update( ['context', 'system'] )
            # Boost >= 1.56 coroutine additionally depends on thread
            if version > 1.55:
                required_libraries.update( ['thread'] )
            if linktype == 'shared':
                required_libraries.update( ['chrono'] )
        elif library == 'filesystem':
            required_libraries.update( ['system'] )
        elif library == 'graph':
            required_libraries.update( ['regex'] )
        elif library == 'locale':
            required_libraries.update( ['filesystem', 'system', 'thread'] )
        elif library == 'log':
            required_libraries.update( ['date_time', 'filesystem', 'system', 'thread'] )
        elif library == 'log_setup':
            required_libraries.update( ['log', 'date_time', 'filesystem', 'system', 'thread'] )
        elif library in { 'test', 'prg_exec_monitor', 'test_exec_monitor', 'unit_test_framework' }:
            # 'test' is an alias for 'unit_test_framework'
            if library == 'test' and 'test' in required_libraries:
                required_libraries.remove( 'test' )
                required_libraries.update( ['unit_test_framework'] )
            if patched_test:
                # Fix: was the single malformed name 'timer, chrono' which
                # would never match a real Boost library.
                required_libraries.update( ['timer', 'chrono', 'system'] )
        elif library == 'timer':
            required_libraries.update( ['chrono', 'system'] )

    libraries = []

    # Emit libraries in the canonical Boost dependency order first...
    for library in boost_dependency_order():
        if library in required_libraries:
            libraries.append( library )

    # ...then append anything requested that is outside the known set.
    for library in required_libraries:
        if library not in boost_dependency_set():
            libraries.append( library )

    return libraries
Ejemplo n.º 25
0
    def __call__(self, env, target, source, match=None, exclude=None):
        """Install the source nodes matching *match*/*exclude* to *target*.

        A relative target (not starting with '#' and not absolute) is
        resolved under env['abs_final_dir'].  Returns the installed file
        nodes, or an empty list when nothing matched.
        """
        destination = target
        if not (destination[0] == '#' or os.path.isabs(destination)):
            destination = os.path.join(env['abs_final_dir'], destination)

        nodes = filter_nodes(source, match, exclude)
        if not nodes:
            return []

        logger.trace("filtered_nodes = [{}]".format(
            colour_items(str(node) for node in nodes)))

        installed = env.Install(destination, nodes)
        cuppa.progress.NotifyProgress.add(env, installed)
        return installed
Ejemplo n.º 26
0
    def __call__( self, env, libraries ):
        """Build the given Boost libraries statically and return the result.

        Ensures 'boost' is an active dependency of *env*, then delegates to
        BoostLibraryBuilder with linktype 'static'.  When build_always was
        requested the result is wrapped in AlwaysBuild().
        """
        if not self._add_dependents:
            logger.warn( "BoostStaticLibrary() is deprecated, use BoostStaticLibs() or BoostStaticLib() instead" )

        libraries = Flatten( [ libraries ] )

        # Make sure the boost dependency is active for this environment.
        if 'boost' not in env['BUILD_WITH']:
            env.BuildWith( 'boost' )
        boost_dependency = env['dependencies']['boost']( env )

        logger.trace( "Build static libraries [{}]".format( colour_items( libraries ) ) )

        builder = BoostLibraryBuilder(
                boost_dependency,
                add_dependents = self._add_dependents,
                verbose_build  = self._verbose_build,
                verbose_config = self._verbose_config )
        library = builder( env, None, None, libraries, 'static' )

        return AlwaysBuild( library ) if self._build_always else library
Ejemplo n.º 27
0
def _lazy_update_library_list( env, emitting, libraries, prebuilt_libraries, add_dependents, linktype, boost, stage_dir ):
    """Determine which of *libraries* still need handling for *stage_dir*.

    Optionally expands the requested libraries with their dependencies,
    then filters out (and records) any already present in the per-stage
    cache *prebuilt_libraries*, which is mutated in place.
    """

    def build_with_library_name( library ):
        # Map dependency names to the bjam targets that actually build them:
        # 'log_setup' is built by 'log', the test monitors by 'test'.
        if library == 'log_setup':
            return 'log'
        if library in ( 'prg_exec_monitor', 'test_exec_monitor', 'unit_test_framework' ):
            return 'test'
        return library

    if add_dependents:
        expanded = add_dependent_libraries( boost, linktype, libraries )
        if emitting:
            libraries = expanded
        else:
            libraries = set( build_with_library_name( name ) for name in expanded )

    already_seen = prebuilt_libraries.get( stage_dir )
    if already_seen is None:
        logger.trace( "Lazy update libraries list for [{}] to [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        prebuilt_libraries[ stage_dir ] = set( libraries )
    else:
        logger.trace( "Lazy read libraries list for [{}]: libraries are [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        libraries = [ l for l in libraries if l not in already_seen ]
        already_seen.update( libraries )

    return libraries
Ejemplo n.º 28
0
    def __call__( self, env, target, source, libraries, linktype ):
        """Schedule the requested Boost libraries for building and install them.

        Registers a BoostLibraryBuilder (action + emitter) on *env*, records
        scheduled libraries per SConstruct and per variant in the module-level
        _prebuilt_boost_libraries cache, orders successive bjam invocations
        with env.Requires(), and finally copies the built libraries to the
        install directory.  Returns the flattened installed library nodes.
        """

        sconstruct_id = env['sconstruct_path']

        # Per-SConstruct cache of library nodes already scheduled by this builder.
        global _prebuilt_boost_libraries
        if sconstruct_id not in _prebuilt_boost_libraries['builder']:
            _prebuilt_boost_libraries['builder'][sconstruct_id] = {}

        # Per-SConstruct ordered list of previous bjam invocations.
        global _bjam_invocations
        if sconstruct_id not in _bjam_invocations:
            _bjam_invocations[sconstruct_id] = []

        logger.trace( "Build Dir = [{}]".format( as_info( env['build_dir'] ) ) )

        logger.trace( "Requested libraries = [{}]".format( colour_items( libraries ) ) )

        # The stage directory uniquely identifies the build variant and is
        # used as the key into the prebuilt-library caches.
        variant      = variant_name( env['variant'].name() )
        target_arch  = env['target_arch']
        toolchain    = env['toolchain']
        stage_dir    = stage_directory( toolchain, variant, target_arch, toolchain.abi_flag(env) )
        variant_key  = stage_dir

        logger.trace( "Prebuilt Libraries Variant Key = [{}]".format( as_notice( variant_key ) ) )

        library_action  = BoostLibraryAction ( env, stage_dir, libraries, self._add_dependents, linktype, self._boost, self._verbose_build, self._verbose_config )
        library_emitter = BoostLibraryEmitter( env, stage_dir, libraries, self._add_dependents, linktype, self._boost )

        # NOTE(review): these traces assume the 'action' and 'emitter' caches
        # already contain variant_key for this sconstruct — confirm the action
        # and emitter constructors populate them before this point.
        logger.trace( "Action  Prebuilt Libraries for [{}] = {}".format(
                as_info( variant_key ),
                colour_items( _prebuilt_boost_libraries['action'][sconstruct_id][variant_key] )
        ) )

        logger.trace( "Emitter Prebuilt Libraries for [{}] = {}".format(
                as_info( variant_key ),
                colour_items( _prebuilt_boost_libraries['emitter'][sconstruct_id][variant_key] )
        ) )

        env.AppendUnique( BUILDERS = {
            'BoostLibraryBuilder' : env.Builder( action=library_action, emitter=library_emitter )
        } )

        built_libraries = env.BoostLibraryBuilder( target, source )

        # Map plain library names ('system', 'thread', ...) to their nodes.
        built_libraries_map = { extract_library_name_from_path(l):l for l in built_libraries }

        logger.trace( "Libraries to be built = [{}]".format( colour_items( built_libraries_map.keys() ) ) )

        if not variant_key in _prebuilt_boost_libraries['builder'][sconstruct_id]:
             _prebuilt_boost_libraries['builder'][sconstruct_id][ variant_key ] = {}

        logger.trace( "Variant sources = [{}]".format( colour_items( _prebuilt_boost_libraries['builder'][sconstruct_id][ variant_key ].keys() ) ) )

        required_libraries = add_dependent_libraries( self._boost, linktype, libraries )

        logger.trace( "Required libraries = [{}]".format( colour_items( required_libraries ) ) )

        unbuilt_libraries = False
        new_libraries = []

        # Reuse cached nodes where possible; only libraries not previously
        # scheduled for this variant count as "new" and trigger invocation
        # ordering below.
        for library in required_libraries:
            if library in _prebuilt_boost_libraries['builder'][sconstruct_id][ variant_key ]:

                logger.trace( "Library [{}] already present in variant [{}]".format( as_notice(library), as_info(variant_key) ) )

                # Calling Depends() is required so SCons knows about the relationship, even
                # if the library already exists in the _prebuilt_boost_libraries dict
                logger.trace( "Add Depends for [{}]".format( as_notice( _prebuilt_boost_libraries['builder'][sconstruct_id][ variant_key ][library].path ) ) )
                env.Depends( built_libraries, _prebuilt_boost_libraries['builder'][sconstruct_id][ variant_key ][library] )
            else:
                unbuilt_libraries = True
                new_libraries.append( library )
                _prebuilt_boost_libraries['builder'][sconstruct_id][ variant_key ][library] = built_libraries_map[library]

            env.Depends( target, _prebuilt_boost_libraries['builder'][sconstruct_id][ variant_key ][library] )

        # NOTE: dict.iteritems() below is Python 2 only.
        logger.trace( "Library sources for variant [{}] = [{}]".format(
                as_info(variant_key),
                colour_items( k+":"+as_info(v.path) for k,v in _prebuilt_boost_libraries['builder'][sconstruct_id][ variant_key ].iteritems() )
        ) )


        if unbuilt_libraries:
            # if this is not the first BJAM invocation for this set of libraries make it require (using Requires)
            # the previous BJAM invocation otherwise we already have an invocation of BJAM that will create the
            # required libraries and therefore we can ignore the invocation

            index = len(_bjam_invocations[sconstruct_id])
            previous_invocation = _bjam_invocations[sconstruct_id] and _bjam_invocations[sconstruct_id][-1] or None

            if previous_invocation and previous_invocation['invocation'] != built_libraries:
                logger.debug( "Add BJAM invocation Requires() such that ([{}][{}][{}]) requires ([{}][{}][{}])".format(
                            as_info(str(index)),
                            as_info(variant_key),
                            colour_items( new_libraries ),
                            as_info(str(previous_invocation['index'])),
                            as_info(previous_invocation['variant']),
                            colour_items( previous_invocation['libraries'] )
                ) )
                env.Requires( built_libraries, previous_invocation['invocation'] )
            # if this is the first invocation of BJAM then add it to the list of BJAM invocations, or if this is
            # a different invocation (for different libraries) of BJAM add it to the list of invocations
            if not previous_invocation or previous_invocation['invocation'] != built_libraries and built_libraries:
                logger.debug( "Adding BJAM invocation [{}] for variant [{}] and new libraries [{}] to invocation list".format(
                            as_info(str(index)),
                            as_info(variant_key),
                            colour_items( new_libraries )
                ) )
                _bjam_invocations[sconstruct_id].append( {
                        'invocation': built_libraries,
                        'index'     : index,
                        'variant'   : variant_key,
                        'libraries' : new_libraries
                } )


        # bjam itself is built on demand and never cleaned.
        bjam = env.Command( bjam_exe( self._boost ), [], BuildBjam( self._boost ) )
        env.NoClean( bjam )

        if built_libraries:

            env.Requires( built_libraries, bjam )

            # On Linux the toolset and project-config jam files must exist
            # before the libraries can be built.
            if cuppa.build_platform.name() == "Linux":

                toolset_target = os.path.join( self._boost.local(), env['toolchain'].name() + "._jam" )
                toolset_config_jam = env.Command( toolset_target, [], WriteToolsetConfigJam() )

                project_config_target = os.path.join( self._boost.local(), "project-config.jam" )
                if not os.path.exists( project_config_target ):
                    project_config_jam = env.Requires( project_config_target, env.AlwaysBuild( toolset_config_jam ) )
                    env.Requires( built_libraries, project_config_jam )

                env.Requires( built_libraries, toolset_config_jam )

        # Shared libraries are installed to the final dir, static ones to the build dir.
        install_dir = linktype == 'shared' and env['abs_final_dir'] or env['abs_build_dir']

        installed_libraries = []

        for library in required_libraries:

            logger.debug( "Install Boost library [{}:{}] to [{}]".format( as_notice(library), as_info(str(_prebuilt_boost_libraries['builder'][sconstruct_id][ variant_key ][library])), as_notice(install_dir) ) )

            library_node = _prebuilt_boost_libraries['builder'][sconstruct_id][ variant_key ][library]

            logger.trace( "Library Node = \n[{}]\n[{}]\n[{}]\n[{}]\n[{}]".format(
                    as_notice(library_node.path),
                    as_notice(str(library_node)),
                    as_notice(str(library_node.get_binfo().bact) ),
                    as_notice(str(library_node.get_state()) ),
                    as_notice(str(library_node.srcnode())   )
            ) )

            installed_library = env.CopyFiles( install_dir, _prebuilt_boost_libraries['builder'][sconstruct_id][ variant_key ][library] )

            installed_libraries.append( installed_library )

        logger.debug( "Boost 'Installed' Libraries = [{}]".format( colour_items( l.path for l in Flatten( installed_libraries ) ) ) )

        return Flatten( installed_libraries )
Ejemplo n.º 29
0
    def __call__(self, env, target, source, libraries, linktype):
        """Schedule the requested Boost libraries for building and install them.

        Variant of the builder that caches already-scheduled library nodes on
        the instance (self._prebuilt_libraries) keyed by the stage directory.
        Registers a BoostLibraryBuilder (action + emitter) on *env*, wires up
        the bjam bootstrap dependencies, and copies each built library to the
        install directory.  Returns the flattened installed library nodes.
        """

        logger.trace("Build Dir = [{}]".format(as_info(env['build_dir'])))

        logger.trace("Requested libraries = [{}]".format(
            colour_items(libraries)))

        # The stage directory uniquely identifies the build variant and is
        # used as the key into the prebuilt-library caches.
        variant = variant_name(env['variant'].name())
        target_arch = env['target_arch']
        toolchain = env['toolchain']
        stage_dir = stage_directory(toolchain, variant, target_arch,
                                    toolchain.abi_flag(env))
        variant_key = stage_dir

        logger.trace("Prebuilt Libraries Variant Key = [{}]".format(
            as_notice(variant_key)))

        library_action = BoostLibraryAction(env, stage_dir, libraries,
                                            self._add_dependents, linktype,
                                            self._boost, self._verbose_build,
                                            self._verbose_config)
        library_emitter = BoostLibraryEmitter(env, stage_dir, libraries,
                                              self._add_dependents, linktype,
                                              self._boost)

        # NOTE(review): these traces assume the class-level caches already
        # contain variant_key — confirm the action/emitter constructors
        # populate them before this point.
        logger.trace("Action  Prebuilt Libraries for [{}] = {}".format(
            as_info(variant_key),
            colour_items(BoostLibraryAction.prebuilt_libraries[variant_key])))
        logger.trace("Emitter Prebuilt Libraries for [{}] = {}".format(
            as_info(variant_key),
            colour_items(BoostLibraryEmitter.prebuilt_libraries[variant_key])))

        env.AppendUnique(
            BUILDERS={
                'BoostLibraryBuilder':
                env.Builder(action=library_action, emitter=library_emitter)
            })

        built_libraries = env.BoostLibraryBuilder(target, source)

        # Map plain library names ('system', 'thread', ...) to their nodes.
        built_libraries_map = {
            extract_library_name_from_path(l): l
            for l in built_libraries
        }

        logger.trace("Libraries to be built = [{}]".format(
            colour_items(built_libraries_map.keys())))

        if not variant_key in self._prebuilt_libraries:
            self._prebuilt_libraries[variant_key] = {}

        logger.trace("Variant sources = [{}]".format(
            colour_items(self._prebuilt_libraries[variant_key].keys())))

        required_libraries = add_dependent_libraries(self._boost, linktype,
                                                     libraries)

        logger.trace("Required libraries = [{}]".format(
            colour_items(required_libraries)))

        # Reuse cached nodes where possible; new libraries are recorded in
        # the per-variant cache for subsequent calls.
        for library in required_libraries:
            if library in self._prebuilt_libraries[variant_key]:

                logger.trace(
                    "Library [{}] already present in variant [{}]".format(
                        as_notice(library), as_info(variant_key)))

                #if library not in built_libraries_map: # The Depends is required regardless so SCons knows about the relationship
                logger.trace("Add Depends for [{}]".format(
                    as_notice(
                        self._prebuilt_libraries[variant_key][library].path)))
                env.Depends(built_libraries,
                            self._prebuilt_libraries[variant_key][library])
            else:
                self._prebuilt_libraries[variant_key][
                    library] = built_libraries_map[library]

        # NOTE: dict.iteritems() below is Python 2 only.
        logger.trace("Library sources for variant [{}] = [{}]".format(
            as_info(variant_key),
            colour_items(k + ":" + as_info(v.path) for k, v in
                         self._prebuilt_libraries[variant_key].iteritems())))

        # bjam itself is built on demand and never cleaned.
        bjam = env.Command(bjam_exe(self._boost), [], BuildBjam(self._boost))
        env.NoClean(bjam)

        if built_libraries:

            env.Requires(built_libraries, bjam)

            # On Linux the toolset and project-config jam files must exist
            # before the libraries can be built.
            if cuppa.build_platform.name() == "Linux":

                toolset_target = os.path.join(
                    self._boost.local(), env['toolchain'].name() + "._jam")
                toolset_config_jam = env.Command(toolset_target, [],
                                                 WriteToolsetConfigJam())

                project_config_target = os.path.join(self._boost.local(),
                                                     "project-config.jam")
                if not os.path.exists(project_config_target):
                    project_config_jam = env.Requires(
                        project_config_target,
                        env.AlwaysBuild(toolset_config_jam))
                    env.Requires(built_libraries, project_config_jam)

                env.Requires(built_libraries, toolset_config_jam)

        # Shared libraries are installed to the final dir, static ones to the build dir.
        install_dir = linktype == 'shared' and env['abs_final_dir'] or env[
            'abs_build_dir']

        installed_libraries = []

        for library in required_libraries:

            logger.debug("Install Boost library [{}:{}] to [{}]".format(
                as_notice(library),
                as_info(str(self._prebuilt_libraries[variant_key][library])),
                as_notice(install_dir)))

            library_node = self._prebuilt_libraries[variant_key][library]

            logger.trace(
                "Library Node = \n[{}]\n[{}]\n[{}]\n[{}]\n[{}]".format(
                    as_notice(library_node.path), as_notice(str(library_node)),
                    as_notice(str(library_node.get_binfo().bact)),
                    as_notice(str(library_node.get_state())),
                    as_notice(str(library_node.srcnode()))))

            installed_library = env.CopyFiles(
                install_dir, self._prebuilt_libraries[variant_key][library])

            installed_libraries.append(installed_library)

        logger.debug("Boost 'Installed' Libraries = [{}]".format(
            colour_items(l.path for l in Flatten(installed_libraries))))

        return Flatten(installed_libraries)
Ejemplo n.º 30
0
Archivo: git.py Proyecto: iCodeIN/cuppa
    def get_branch(cls, path):
        """Return (branch, remote) for the git working copy at *path*.

        Uses ``git branch`` to detect a detached HEAD.  On an attached HEAD
        the branch and its upstream are parsed from ``git status -sb``; a
        rebase in progress is reported via a warning.  On a detached HEAD
        the refs decorating HEAD (``git show -s --pretty=%d``) are
        classified as local/tag/remote to recover a branch and remote.
        Either element of the returned tuple may be None.
        """
        branch = None
        remote = None

        head_detached = False
        command = "{git} branch".format(git=cls.binary())
        branch_info = cls.execute_command(command, path)
        if branch_info:
            match = re.search(r'^[*] [(]HEAD detached ', branch_info)
            if match:
                head_detached = True

        if not head_detached:
            result = cls.execute_command(
                "{git} status -sb".format(git=cls.binary()), path)
            if result:
                match = re.search(
                    r'## (?P<branch>[^)]+)[.][.][.](?P<remote>[^)\n]+)',
                    result)
                if match:
                    branch = match.group("branch")
                    remote = match.group("remote")
                # Fix: the parentheses were unescaped, forming a regex group
                # that matched the literal text "## HEAD no branch" — which
                # git never prints — so the rebase check below never ran.
                match = re.search(r'## HEAD \(no branch\)', result)
                # Check if we are rebasing
                if match:
                    command = "{git} branch".format(git=cls.binary())
                    branch_info = cls.execute_command(command, path)
                    if branch_info:
                        match = re.search(
                            r'(no branch, rebasing (?P<branch>[^)]+))',
                            branch_info)
                        if match:
                            branch = match.group("branch")
                            logger.warn(
                                as_warning(
                                    "Currently rebasing branch [{}]".format(
                                        branch)))

            return branch, remote

        else:
            # Detached HEAD: ask git which refs decorate HEAD.
            # (the "\%d" keeps a literal backslash so the shell passes %d through)
            result = cls.execute_command(
                "{git} show -s --pretty=\%d --decorate=full HEAD".format(
                    git=cls.binary()), path)

            match = re.search(r'HEAD(?:(?:[^ ]* -> |[^,]*, )(?P<refs>[^)]+))?',
                              result)

            if match and match.group("refs"):
                refs = [{
                    "ref": r.strip(),
                    "type": ""
                } for r in match.group("refs").split(',')]
                logger.trace("Refs (using show) for [{}] are [{}]".format(
                    as_notice(path), colour_items((r["ref"] for r in refs))))
                if refs:
                    # Classify each ref: L=local branch, T=tag, R=remote, U=unknown
                    for ref in refs:
                        if ref["ref"].startswith("refs/heads/"):
                            ref["ref"] = ref["ref"][len("refs/heads/"):]
                            ref["type"] = "L"
                        elif ref["ref"].startswith("refs/tags/"):
                            ref["ref"] = ref["ref"][len("refs/tags/"):]
                            ref["type"] = "T"
                        elif ref["ref"].startswith("tag: refs/tags/"):
                            ref["ref"] = ref["ref"][len("tag: refs/tags/"):]
                            ref["type"] = "T"
                        elif ref["ref"].startswith("refs/remotes/"):
                            ref["ref"] = ref["ref"][len("refs/remotes/"):]
                            ref["type"] = "R"
                        else:
                            ref["type"] = "U"

                    logger.trace(
                        "Refs (after classification) for [{}] are [{}]".format(
                            as_notice(path),
                            colour_items((":".join([r["type"], r["ref"]])
                                          for r in refs))))

                    if refs[0]["type"] == "L":
                        branch = refs[0]["ref"]
                    #elif refs[0]["type"] == "T":
                    #branch = refs[0]["ref"]
                    elif refs[0]["type"] == "R":
                        # Remote ref: strip the remote name, e.g. "origin/main" -> "main"
                        branch = refs[0]["ref"].split('/')[1]

                    remote = next(
                        (ref["ref"] for ref in refs if ref["type"] == "R"),
                        None)

                logger.trace("Branch (using show) for [{}] is [{}]".format(
                    as_notice(path), as_info(str(branch))))
            else:
                if result == "(HEAD)":
                    command = "{git} branch".format(git=cls.binary())
                    branch_info = cls.execute_command(command)
                    if branch_info:
                        match = re.search(
                            r'(no branch, rebasing (?P<branch>[^)]+))',
                            branch_info)
                        if match:
                            branch = match.group("branch")
                            logger.warn(
                                as_warning(
                                    "Currently rebasing branch [{}]".format(
                                        branch)))
        #if not branch:
        #logger.warn( as_warning( "No branch found from [{}]".format( result ) ) )

        return branch, remote
Ejemplo n.º 31
0
    def create_build_envs( self, toolchain, cuppa_env ):
        """Create one SCons build environment per active (variant, arch) pair.

        Active variants/actions are taken from command-line options, falling
        back to configured defaults and finally the toolchain's defaults.
        Optionally the OS environment (or just PATH) is propagated or merged
        into each environment.  Returns a list of dicts with keys 'variant',
        'target_arch', 'abi' and 'env'.
        """

        propagate_environment = cuppa_env['propagate_env']
        propagate_path        = cuppa_env['propagate_path']
        merge_path            = cuppa_env['merge_path']

        variants = cuppa_env[ self.variants_key ]
        actions  = cuppa_env[ self.actions_key ]

        target_architectures = cuppa_env[ 'target_architectures' ]

        # A single None entry means "use the toolchain's default architecture".
        if not target_architectures:
            target_architectures = [ None ]

        def get_active_from_options( tasks ):
            # Tasks whose name was passed as a command-line option.
            active_tasks = {}
            for key, task in tasks.items():
                if cuppa_env.get_option( task.name() ):
                    active_tasks[ task.name() ] = task
            return active_tasks

        active_variants = get_active_from_options( variants )
        active_actions  = get_active_from_options( actions )

        def get_active_from_defaults( default_tasks, tasks ):
            # Intersection of the default task names with the known tasks.
            # NOTE: dict.has_key() is a Python 2 idiom.
            active_tasks = {}
            for task in default_tasks:
                if tasks.has_key( task ):
                    active_tasks[ task ] = tasks[ task ]
            return active_tasks

        # Fall back to configured defaults, then to the toolchain defaults.
        if not active_variants and not active_actions:
            default_variants = cuppa_env['default_variants'] or toolchain.default_variants()
            if default_variants:
                active_variants = get_active_from_defaults( default_variants, variants )
                active_actions = get_active_from_defaults( default_variants, actions )
                if active_variants:
                    logger.info( "Default build variants of [{}] being used.".format( colour_items( active_variants, as_info ) ) )
                if active_actions:
                    logger.info( "Default build actions of [{}] being used.".format( colour_items( active_actions, as_info ) ) )

        if not active_variants:
            active_variants = get_active_from_defaults( toolchain.default_variants(), variants )
            logger.info( "No active variants specified so toolchain defaults of [{}] being used.".format( colour_items( active_variants, as_info ) ) )

        logger.debug( "Using active_variants = [{}]".format( colour_items( active_variants, as_info ) ) )
        logger.debug( "Using active_actions = [{}]".format( colour_items( active_actions, as_info ) ) )

        build_envs = []

        for key, variant in active_variants.items():

            for target_arch in target_architectures:

                env, target_arch = toolchain.make_env( cuppa_env, variant, target_arch )

                if env:

                    # TODO: Refactor this code out
                    if propagate_environment or propagate_path or merge_path:

                        def merge_paths( default_paths, env_paths ):
                            # Concatenate keeping first-occurrence order, dropping duplicates.
                            path_set = set( default_paths + env_paths )
                            def record_path( path ):
                                path_set.discard(path)
                                return path
                            return [ record_path(p) for p in default_paths + env_paths if p in path_set ]

                        def get_paths_from( environment ):
                            return 'PATH' in environment and environment['PATH'].split(os.pathsep) or []

                        default_paths = get_paths_from( env['ENV'] )
                        env_paths = get_paths_from( os.environ )
                        if propagate_environment:
                            env['ENV'] = os.environ.copy()
                            logger.debug( "propagating environment for [{}:{}] to all subprocesses: [{}]".format(
                                    variant.name(),
                                    target_arch,
                                    as_notice( str(env['ENV']) ) )
                            )
                        if propagate_path and not propagate_environment:
                            # NOTE(review): this assigns the *list* of paths, while the
                            # merge_path branch below joins with os.pathsep — confirm
                            # whether SCons normalises list-valued ENV['PATH'] here.
                            env['ENV']['PATH'] = env_paths
                            logger.debug( "propagating PATH for [{}:{}] to all subprocesses: [{}]".format(
                                    variant.name(),
                                    target_arch,
                                    colour_items( env_paths ) )
                            )
                        elif merge_path:
                            merged_paths = merge_paths( default_paths, env_paths )
                            env['ENV']['PATH'] = os.pathsep.join( merged_paths )
                            logger.debug( "merging PATH for [{}:{}] to all subprocesses: [{}]".format(
                                    variant.name(),
                                    target_arch,
                                    colour_items( merged_paths ) )
                            )

                    build_envs.append( {
                        'variant': key,
                        'target_arch': target_arch,
                        'abi': toolchain.abi( env ),
                        'env': env } )

                    # Install the output processor unless raw output was requested.
                    if not cuppa_env['raw_output']:
                        cuppa.output_processor.Processor.install( env )

                    env['toolchain']       = toolchain
                    env['variant']         = variant
                    env['target_arch']     = target_arch
                    env['abi']             = toolchain.abi( env )
                    env['variant_actions'] = self.get_active_actions( cuppa_env, variant, active_variants, active_actions )

        return build_envs
Ejemplo n.º 32
0
    def build(self, cuppa_env):
        """Resolve the projects to build and run their sconscripts.

        Projects come from the --projects option, then default_projects,
        then sconscripts discovered under the launch directory.  Project
        entries that are directories are expanded to their contained
        sconscripts.  Each sconscript is then executed once per active
        toolchain and build environment.  In dump mode nothing is built.
        """

        #        cuppa.progress.NotifyProgress.register_callback( None, self.on_progress )

        cuppa_env['empty_env'] = cuppa_env.create_env()
        projects = cuppa_env.get_option('projects')
        toolchains = cuppa_env['active_toolchains']

        if not projects:
            projects = cuppa_env['default_projects']

            # No defaults (or not run from the launch dir): discover
            # sconscripts beneath the launch directory instead.
            if not projects or not cuppa_env['run_from_launch_dir']:
                sub_sconscripts = self.get_sub_sconscripts(
                    cuppa_env['launch_dir'],
                    [cuppa_env['build_root'], cuppa_env['download_root']])
                if sub_sconscripts:
                    projects = sub_sconscripts
                    logger.info("Using sub-sconscripts [{}]".format(
                        colour_items(projects)))
            elif projects:
                logger.info("Using default_projects [{}]".format(
                    colour_items(projects)))

        if projects:

            sconscripts = []

            for project in projects:

                # Project path that only resolves relative to the launch dir.
                if (not os.path.exists(project)
                        and not cuppa_env['run_from_launch_dir']
                        and not os.path.isabs(project)):

                    path = os.path.join(cuppa_env['launch_dir'], project)

                    if os.path.exists(path):
                        if os.path.isdir(path):
                            sub_sconscripts = self.get_sub_sconscripts(
                                project, [
                                    cuppa_env['build_root'],
                                    cuppa_env['download_root']
                                ])
                            if sub_sconscripts:
                                logger.info(
                                    "Reading project folder [{}] and using sub-sconscripts [{}]"
                                    .format(project,
                                            colour_items(sub_sconscripts)))
                                sconscripts.extend(sub_sconscripts)
                        else:
                            sconscripts.append(path)

                # Project is a directory: expand to its sconscripts.
                elif os.path.exists(project) and os.path.isdir(project):
                    sub_sconscripts = self.get_sub_sconscripts(
                        project,
                        [cuppa_env['build_root'], cuppa_env['download_root']])
                    if sub_sconscripts:
                        logger.info(
                            "Reading project folder [{}] and using sub-sconscripts [{}]"
                            .format(project, colour_items(sub_sconscripts)))
                        sconscripts.extend(sub_sconscripts)
                else:
                    sconscripts.append(project)

            # Run every sconscript once per toolchain and build environment.
            for toolchain in toolchains:
                build_envs = self.create_build_envs(toolchain, cuppa_env)
                for build_env in build_envs:
                    for sconscript in sconscripts:
                        decider = cuppa_env.get_option('decider')
                        if decider:
                            build_env['env'].Decider(decider)
                        self.call_project_sconscript_files(
                            toolchain, build_env['variant'],
                            build_env['target_arch'], build_env['abi'],
                            build_env['env'], sconscript)

            if cuppa_env['dump']:
                print(
                    "cuppa: Performing dump only, so no builds will be attempted."
                )
                print("cuppa: Nothing to be done. Exiting.")
                SCons.Script.Exit()

        else:
            logger.warn("No projects to build. Nothing to be done")
Ejemplo n.º 33
0
    def __call__(self, env, target, source, libraries, linktype):
        """Schedule the requested Boost libraries for building and install them.

        Registers a ``BoostLibraryBuilder`` (action + emitter) for this
        variant, deduplicates libraries that an earlier call already scheduled
        for the same stage directory (tracked per SConstruct in module-level
        globals), serialises successive BJAM invocations with ``Requires()``
        so they never run concurrently, and finally copies the built library
        files into the install directory selected by *linktype*.

        :param env:       SCons construction environment for the current variant.
        :param target:    Node(s) that depend on the Boost libraries.
        :param source:    Source node(s) handed to the BoostLibraryBuilder.
        :param libraries: Boost library names requested by the caller.
        :param linktype:  'shared' or 'static' - also selects the install dir.
        :returns:         Flattened list of the installed library nodes.
        """

        # All bookkeeping below is keyed by the SConstruct path so multiple
        # sconstructs sharing this process do not see each other's libraries.
        sconstruct_id = env['sconstruct_path']

        global _prebuilt_boost_libraries
        if sconstruct_id not in _prebuilt_boost_libraries['builder']:
            _prebuilt_boost_libraries['builder'][sconstruct_id] = {}

        global _bjam_invocations
        if sconstruct_id not in _bjam_invocations:
            _bjam_invocations[sconstruct_id] = []

        logger.trace("Build Dir = [{}]".format(as_info(env['build_dir'])))

        logger.trace("Requested libraries = [{}]".format(
            colour_items(libraries)))

        variant = variant_name(env['variant'].name())
        target_arch = env['target_arch']
        toolchain = env['toolchain']
        stage_dir = stage_directory(toolchain, variant, target_arch,
                                    toolchain.abi_flag(env))
        # The stage directory uniquely identifies a (toolchain, variant,
        # target_arch, abi) combination, so it doubles as the variant key.
        variant_key = stage_dir

        logger.trace("Prebuilt Libraries Variant Key = [{}]".format(
            as_notice(variant_key)))

        library_action = BoostLibraryAction(env, stage_dir, libraries,
                                            self._add_dependents, linktype,
                                            self._boost, self._verbose_build,
                                            self._verbose_config)
        library_emitter = BoostLibraryEmitter(env, stage_dir, libraries,
                                              self._add_dependents, linktype,
                                              self._boost)

        logger.trace("Action  Prebuilt Libraries for [{}] = {}".format(
            as_info(variant_key),
            colour_items(_prebuilt_boost_libraries['action'][sconstruct_id]
                         [variant_key])))

        logger.trace("Emitter Prebuilt Libraries for [{}] = {}".format(
            as_info(variant_key),
            colour_items(_prebuilt_boost_libraries['emitter'][sconstruct_id]
                         [variant_key])))

        env.AppendUnique(
            BUILDERS={
                'BoostLibraryBuilder':
                env.Builder(action=library_action, emitter=library_emitter)
            })

        built_libraries = env.BoostLibraryBuilder(target, source)

        # Map each emitted node back to its Boost library name so the
        # bookkeeping below can look nodes up by name.
        built_libraries_map = {
            extract_library_name_from_path(l): l
            for l in built_libraries
        }

        logger.trace("Libraries to be built = [{}]".format(
            colour_items(built_libraries_map.keys())))

        if not variant_key in _prebuilt_boost_libraries['builder'][
                sconstruct_id]:
            _prebuilt_boost_libraries['builder'][sconstruct_id][
                variant_key] = {}

        logger.trace("Variant sources = [{}]".format(
            colour_items(_prebuilt_boost_libraries['builder'][sconstruct_id]
                         [variant_key].keys())))

        # Expand the requested libraries with their Boost dependencies so
        # everything the caller transitively needs is accounted for.
        required_libraries = add_dependent_libraries(self._boost, linktype,
                                                     libraries)

        logger.trace("Required libraries = [{}]".format(
            colour_items(required_libraries)))

        unbuilt_libraries = False
        new_libraries = []

        for library in required_libraries:
            if library in _prebuilt_boost_libraries['builder'][sconstruct_id][
                    variant_key]:

                logger.trace(
                    "Library [{}] already present in variant [{}]".format(
                        as_notice(library), as_info(variant_key)))

                # Calling Depends() is required so SCons knows about the relationship, even
                # if the library already exists in the _prebuilt_boost_libraries dict
                logger.trace("Add Depends for [{}]".format(
                    as_notice(_prebuilt_boost_libraries['builder']
                              [sconstruct_id][variant_key][library].path)))
                env.Depends(
                    built_libraries, _prebuilt_boost_libraries['builder']
                    [sconstruct_id][variant_key][library])
            else:
                # First time this library is seen for this variant: remember
                # its node so later calls can reuse it instead of rebuilding.
                unbuilt_libraries = True
                new_libraries.append(library)
                _prebuilt_boost_libraries['builder'][sconstruct_id][
                    variant_key][library] = built_libraries_map[library]

            # Every required library - whether newly scheduled or reused from
            # the cache - becomes a dependency of the caller's target.
            env.Depends(
                target, _prebuilt_boost_libraries['builder'][sconstruct_id]
                [variant_key][library])

        logger.trace("Library sources for variant [{}] = [{}]".format(
            as_info(variant_key),
            colour_items(
                k + ":" + as_info(v.path)
                for k, v in six.iteritems(_prebuilt_boost_libraries['builder']
                                          [sconstruct_id][variant_key]))))

        if unbuilt_libraries:
            # if this is not the first BJAM invocation for this set of libraries make it require (using Requires)
            # the previous BJAM invocation otherwise we already have an invocation of BJAM that will create the
            # required libraries and therefore we can ignore the invocation

            index = len(_bjam_invocations[sconstruct_id])
            previous_invocation = _bjam_invocations[
                sconstruct_id] and _bjam_invocations[sconstruct_id][-1] or None

            if previous_invocation and previous_invocation[
                    'invocation'] != built_libraries:
                logger.debug(
                    "Add BJAM invocation Requires() such that ([{}][{}][{}]) requires ([{}][{}][{}])"
                    .format(as_info(str(index)), as_info(variant_key),
                            colour_items(new_libraries),
                            as_info(str(previous_invocation['index'])),
                            as_info(previous_invocation['variant']),
                            colour_items(previous_invocation['libraries'])))
                env.Requires(built_libraries,
                             previous_invocation['invocation'])
            # if this is the first invocation of BJAM then add it to the list of BJAM invocations, or if this is
            # a different invocation (for different libraries) of BJAM add it to the list of invocations
            if not previous_invocation or previous_invocation[
                    'invocation'] != built_libraries and built_libraries:
                logger.debug(
                    "Adding BJAM invocation [{}] for variant [{}] and new libraries [{}] to invocation list"
                    .format(as_info(str(index)), as_info(variant_key),
                            colour_items(new_libraries)))
                _bjam_invocations[sconstruct_id].append({
                    'invocation':
                    built_libraries,
                    'index':
                    index,
                    'variant':
                    variant_key,
                    'libraries':
                    new_libraries
                })

        # bjam itself is built on demand from the Boost source tree and must
        # never be cleaned away, as other variants may still need it.
        bjam = env.Command(bjam_exe(self._boost), [], BuildBjam(self._boost))
        env.NoClean(bjam)

        if built_libraries:

            env.Requires(built_libraries, bjam)

            if cuppa.build_platform.name() == "Linux":

                # On Linux a toolset configuration jam file is generated so
                # bjam picks up the same compiler as the active toolchain.
                toolset_target = os.path.join(
                    self._boost.local(), env['toolchain'].name() + "._jam")
                toolset_config_jam = env.Command(toolset_target, [],
                                                 WriteToolsetConfigJam())

                project_config_target = os.path.join(self._boost.local(),
                                                     "project-config.jam")
                if not os.path.exists(project_config_target):
                    project_config_jam = env.Requires(
                        project_config_target,
                        env.AlwaysBuild(toolset_config_jam))
                    env.Requires(built_libraries, project_config_jam)

                env.Requires(built_libraries, toolset_config_jam)

        # Shared libraries must end up next to the executables (final dir);
        # static libraries only need to be visible to the linker (build dir).
        install_dir = linktype == 'shared' and env['abs_final_dir'] or env[
            'abs_build_dir']

        installed_libraries = []

        for library in required_libraries:

            logger.debug("Install Boost library [{}:{}] to [{}]".format(
                as_notice(library),
                as_info(
                    str(_prebuilt_boost_libraries['builder'][sconstruct_id]
                        [variant_key][library])), as_notice(install_dir)))

            library_node = _prebuilt_boost_libraries['builder'][sconstruct_id][
                variant_key][library]

            logger.trace(
                "Library Node = \n[{}]\n[{}]\n[{}]\n[{}]\n[{}]".format(
                    as_notice(library_node.path), as_notice(str(library_node)),
                    as_notice(str(library_node.get_binfo().bact)),
                    as_notice(str(library_node.get_state())),
                    as_notice(str(library_node.srcnode()))))

            installed_library = env.CopyFiles(
                install_dir, _prebuilt_boost_libraries['builder']
                [sconstruct_id][variant_key][library])

            installed_libraries.append(installed_library)

        logger.debug("Boost 'Installed' Libraries = [{}]".format(
            colour_items(l.path for l in Flatten(installed_libraries))))

        return Flatten(installed_libraries)
Ejemplo n.º 34
0
    def __call__(self, target, source, env):
        """Build the per-variant coverage index and summary files.

        Looks for a ".cov_files" node in *source*; when present, renders the
        variant coverage index from the individual "coverage--*.log" summary
        files found in the final dir, writes a matching ".log" summary for
        this variant, folds the result into the master coverage via
        ``CoverageIndexBuilder.update_coverage()`` and copies the index to
        the destination directory.

        Always returns None (SCons action convention: targets are written
        in place, no new nodes are produced).
        """

        logger.trace("target = [{}]".format(
            colour_items([str(node) for node in target])))
        logger.trace("source = [{}]".format(
            colour_items([str(node) for node in source])))

        files_node = next(
            (s for s in source if os.path.splitext(str(s))[1] == ".cov_files"),
            None)
        if files_node:

            # NOTE(review): this mutates self._destination, so a second
            # invocation would append destination_subdir() again - confirm
            # this action only ever runs once per builder instance (the
            # related report builder uses a local variable instead).
            if not self._destination:
                self._destination = env['abs_final_dir']
            else:
                self._destination = self._destination + destination_subdir(env)

            variant_index_path = os.path.join(env['abs_final_dir'],
                                              coverage_index_name_from(env))
            variant_summary_path = os.path.splitext(
                variant_index_path)[0] + ".log"
            summary_files = env.Glob(
                os.path.join(env['abs_final_dir'], "coverage--*.log"))

            logger.trace("summary_files = [{}]".format(
                colour_items([str(node) for node in summary_files])))

            with open(variant_index_path, 'w') as variant_index_file:

                coverage = coverage_entry(coverage_file=self.summary_name(env))
                coverage.coverage_context = get_toolchain_variant_dir(env)

                # Each summary file starts with three lines: the index file
                # name, the lines summary and the branches summary (see the
                # sample format in the comment further below).
                for path in summary_files:
                    with open(str(path), 'r') as summary_file:
                        index_file = summary_file.readline()
                        lines_summary = summary_file.readline()
                        branches_summary = summary_file.readline()

                        coverage.append(
                            CoverageIndexBuilder.get_entry(
                                index_file, lines_summary, branches_summary,
                                get_toolchain_variant_dir(env),
                                get_offset_dir(env), self._destination))

                template = CoverageIndexBuilder.get_template()

                variant_index_file.write(
                    template.render(
                        coverage_summary=coverage,
                        coverage_entries=sorted(
                            coverage.entries,
                            key=lambda entry: entry.coverage_name),
                        LOC=lines_of_code_format,
                    ))

                # Sample of the summary format written below:
                #coverage--value.html
                #lines: 100.0% (99 out of 99)
                #branches: 50.0% (301 out of 602)

                with open(variant_summary_path, 'w') as variant_summary_file:
                    variant_summary_file.write(
                        # Fixed: the first line must be the index file name
                        # (per the sample format above); previously a literal
                        # "(unknown)" was written and the "filename" keyword
                        # argument was never used.
                        "{filename}\n"
                        "lines: {lines_percent}% ({lines_covered} out of {lines_total})\n"
                        "branches: {branches_percent}% ({branches_covered} out of {branches_total})\n"
                        "toolchain_variant_dir: {toolchain_variant_dir}\n"
                        "offset_dir: {offset_dir}\n"
                        "subdir: {subdir}\n"
                        "name: {name}\n".format(
                            filename=os.path.split(variant_index_path)[1],
                            lines_percent=coverage.lines_percent,
                            lines_covered=coverage.lines_covered,
                            lines_total=coverage.lines_total,
                            branches_percent=coverage.branches_percent,
                            branches_covered=coverage.branches_covered,
                            branches_total=coverage.branches_total,
                            toolchain_variant_dir=get_toolchain_variant_dir(
                                env),
                            offset_dir=get_offset_dir(env),
                            subdir=destination_subdir(env),
                            name=sconscript_name(env),
                        ))

                CoverageIndexBuilder.update_coverage(coverage)

            logger.trace(
                "self._destination = [{}], variant_index_path = [{}]".format(
                    as_info(str(self._destination)),
                    as_notice(str(variant_index_path))))

            env.CopyFiles(self._destination, variant_index_path)

        return None
Ejemplo n.º 35
0
def lazy_update_library_list( env, emitting, libraries, built_libraries, add_dependents, linktype, boost, stage_dir ):
    """Return only the libraries not yet scheduled for *stage_dir*.

    When *add_dependents* is set the requested *libraries* are first expanded
    with their Boost dependencies. The per-stage-dir cache *built_libraries*
    (mutated in place) is then consulted: the first time *stage_dir* is seen
    all libraries are recorded and returned; on later calls only libraries
    not already recorded are returned, and the cache is updated so they are
    not handed out again.
    """

    if add_dependents:
        if not emitting:
            # build_with_library_name maps a library to the name it is built
            # under (module-level helper, e.g. 'log_setup' builds via 'log').
            libraries = set( build_with_library_name(l) for l in add_dependent_libraries( boost, linktype, libraries ) )
        else:
            libraries = add_dependent_libraries( boost, linktype, libraries )

    if stage_dir not in built_libraries:
        logger.trace( "Lazy update libraries list for [{}] to [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        built_libraries[ stage_dir ] = set( libraries )
    else:
        logger.trace( "Lazy read libraries list for [{}]: libraries are [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        libraries = [ l for l in libraries if l not in built_libraries[ stage_dir ] ]
        # Fixed: record the newly returned libraries in the cache, otherwise
        # a later call for the same stage_dir would return (and so schedule a
        # rebuild of) the very same libraries again.
        built_libraries[ stage_dir ].update( libraries )

    return libraries
Ejemplo n.º 36
0
    def __call__( self, env, target, source, libraries, linktype ):
        """Build the requested Boost libraries with bjam and install them.

        Registers a BoostLibraryBuilder (action + emitter) for this variant,
        builds bjam itself on demand, records each built library node per
        variant in self._library_sources so later calls can reuse instead of
        rebuild, and finally copies the built library files into the install
        directory selected by *linktype*.

        :param env:       SCons construction environment for the current variant.
        :param target:    Node(s) that depend on the Boost libraries.
        :param source:    Source node(s) handed to the BoostLibraryBuilder.
        :param libraries: Boost library names requested by the caller.
        :param linktype:  'shared' or 'static' - also selects the install dir.
        :returns:         Flattened list of the installed library nodes.
        """

        variant      = variant_name( env['variant'].name() )
        target_arch  = env['target_arch']
        toolchain    = env['toolchain']
        stage_dir    = stage_directory( toolchain, variant, target_arch, toolchain.abi_flag(env) )

        library_action  = BoostLibraryAction ( env, stage_dir, libraries, self._add_dependents, linktype, self._boost, self._verbose_build, self._verbose_config )
        library_emitter = BoostLibraryEmitter( env, stage_dir, libraries, self._add_dependents, linktype, self._boost )

        logger.trace( "env = [{}]".format( as_info( env['build_dir'] ) ) )

        env.AppendUnique( BUILDERS = {
            'BoostLibraryBuilder' : env.Builder( action=library_action, emitter=library_emitter )
        } )

        # bjam itself is built on demand from the Boost source tree and is
        # never cleaned as other variants may still need it.
        bjam_exe = 'bjam'
        if platform.system() == "Windows":
            bjam_exe += ".exe"
        bjam_target = os.path.join( self._boost.local(), bjam_exe )
        bjam = env.Command( bjam_target, [], BuildBjam( self._boost ) )
        env.NoClean( bjam )

        built_libraries = env.BoostLibraryBuilder( target, source )

        built_library_map = {}
        for library in built_libraries:
            # Extract the library name from the library filename.
            # Possibly use regex instead?
            name = os.path.split( str(library) )[1]
            name = name.split( "." )[0]
            name = name.split( "-" )[0]
            name = "_".join( name.split( "_" )[1:] )

            built_library_map[name] = library

        logger.trace( "Built Library Map = [{}]".format( colour_items( built_library_map.keys() ) ) )

        # The stage directory uniquely identifies a (toolchain, variant,
        # target_arch, abi) combination, so it doubles as the variant key.
        variant_key = stage_dir

        logger.trace( "Source Libraries Variant Key = [{}]".format( as_notice( variant_key ) ) )

        if variant_key not in self._library_sources:
            self._library_sources[ variant_key ] = {}

        logger.trace( "Variant sources = [{}]".format( colour_items( self._library_sources[ variant_key ].keys() ) ) )

        # Expand the requested libraries with their Boost dependencies.
        required_libraries = add_dependent_libraries( self._boost, linktype, libraries )

        logger.trace( "Required libraries = [{}]".format( colour_items( required_libraries ) ) )

        for library in required_libraries:
            if library in self._library_sources[ variant_key ]:

                logger.trace( "Library [{}] already present in variant [{}]".format( as_notice(library), as_info(variant_key) ) )

                # Depends() is still needed so SCons knows about the
                # relationship even though the node is reused from the cache.
                if library not in built_library_map:
                    logger.trace( "Add Depends for [{}]".format( as_notice( self._library_sources[ variant_key ][library].path ) ) )
                    env.Depends( built_libraries, self._library_sources[ variant_key ][library] )
            else:
                self._library_sources[ variant_key ][library] = built_library_map[library]

        # Fixed: use .items() instead of the Python-2-only .iteritems() so
        # this also runs under Python 3 (matches the newer builder code).
        logger.trace( "Library sources for variant [{}] = [{}]".format(
                as_info(variant_key),
                colour_items( k+":"+as_info(v.path) for k,v in self._library_sources[ variant_key ].items() )
        ) )

        if built_libraries:

            env.Requires( built_libraries, bjam )

            if cuppa.build_platform.name() == "Linux":

                # On Linux a toolset configuration jam file is generated so
                # bjam picks up the same compiler as the active toolchain.
                toolset_target = os.path.join( self._boost.local(), env['toolchain'].name() + "._jam" )
                toolset_config_jam = env.Command( toolset_target, [], WriteToolsetConfigJam() )

                project_config_target = os.path.join( self._boost.local(), "project-config.jam" )
                if not os.path.exists( project_config_target ):
                    project_config_jam = env.Requires( project_config_target, env.AlwaysBuild( toolset_config_jam ) )
                    env.Requires( built_libraries, project_config_jam )

                env.Requires( built_libraries, toolset_config_jam )

        # Shared libraries must end up next to the executables (final dir);
        # static libraries only need to be visible to the linker (build dir).
        install_dir = env['abs_build_dir']

        if linktype == 'shared':
            install_dir = env['abs_final_dir']

        installed_libraries = []

        for library in required_libraries:

            logger.debug( "Install Boost library [{}:{}] to [{}]".format( as_notice(library), as_info(str(self._library_sources[ variant_key ][library])), as_notice(install_dir) ) )

            library_node = self._library_sources[ variant_key ][library]

            logger.trace( "Library Node = \n[{}]\n[{}]\n[{}]\n[{}]".format(
                    as_notice(library_node.path),
                    as_notice(str(library_node)),
                    as_notice(str(library_node.get_binfo().bact) ),
                    as_notice(str(library_node.get_state()) )
            ) )

            installed_library = env.CopyFiles( install_dir, self._library_sources[ variant_key ][library] )

            installed_libraries.append( installed_library )

        logger.debug( "Boost 'Installed' Libraries = [{}]".format( colour_items( l.path for l in Flatten( installed_libraries ) ) ) )

        return Flatten( installed_libraries )
Ejemplo n.º 37
0
    def build( self, cuppa_env ):
        """Discover project sconscripts and build each one per toolchain.

        Projects come from the --projects option, falling back to
        ``default_projects``, falling back to sconscripts discovered beneath
        the launch directory. Each project naming a directory is expanded to
        the sconscripts found inside it; otherwise the project path itself is
        used. Every sconscript is then built once per (toolchain, build env)
        combination.

        :param cuppa_env: The cuppa environment holding options, toolchains
                          and directory configuration.
        """

#        cuppa.progress.NotifyProgress.register_callback( None, self.on_progress )

        cuppa_env['empty_env'] = cuppa_env.create_env()
        projects   = cuppa_env.get_option( 'projects' )
        toolchains = cuppa_env['active_toolchains']

        if not projects:
            # No explicit --projects: fall back to the configured defaults,
            # and if there are none (or we are not running from the launch
            # dir) discover sconscripts beneath the launch directory instead,
            # excluding the build and download roots.
            projects = cuppa_env['default_projects']

            if not projects or not cuppa_env['run_from_launch_dir']:
                sub_sconscripts = self.get_sub_sconscripts(
                        cuppa_env['launch_dir'],
                        [ cuppa_env['build_root'], cuppa_env['download_root'] ]
                )
                if sub_sconscripts:
                    projects = sub_sconscripts
                    logger.info( "Using sub-sconscripts [{}]".format( colour_items( projects ) ) )
            elif projects:
                logger.info( "Using default_projects [{}]".format( colour_items( projects ) ) )

        if projects:

            sconscripts = []

            for project in projects:

                # A relative project that does not exist from here may still
                # exist relative to the launch directory - re-anchor it.
                if(     not os.path.exists( project )
                    and not cuppa_env['run_from_launch_dir']
                    and not os.path.isabs( project ) ):

                    path = os.path.join( cuppa_env['launch_dir'], project )

                    # NOTE(review): if neither branch matches (path does not
                    # exist) the project is silently dropped - confirm that
                    # is intended rather than a warning being wanted here.
                    if os.path.exists( path ):
                        if os.path.isdir( path ):
                            # NOTE(review): passes `project` (not the
                            # launch-dir-anchored `path`) to
                            # get_sub_sconscripts - confirm intentional.
                            sub_sconscripts = self.get_sub_sconscripts(
                                project,
                                [ cuppa_env['build_root'], cuppa_env['download_root'] ]
                            )
                            if sub_sconscripts:
                                logger.info( "Reading project folder [{}] and using sub-sconscripts [{}]".format(
                                        project, colour_items( sub_sconscripts )
                                ) )
                                sconscripts.extend( sub_sconscripts )
                        else:
                            sconscripts.append( path )

                elif os.path.exists( project ) and os.path.isdir( project ):
                    # Project names an existing directory: use every
                    # sconscript found inside it.
                    sub_sconscripts = self.get_sub_sconscripts(
                            project,
                            [ cuppa_env['build_root'], cuppa_env['download_root'] ]
                    )
                    if sub_sconscripts:
                        logger.info( "Reading project folder [{}] and using sub-sconscripts [{}]".format(
                                project, colour_items( sub_sconscripts )
                        ) )
                        sconscripts.extend( sub_sconscripts )
                else:
                    sconscripts.append( project )

            # Build every sconscript once per (toolchain, build env) pair.
            for toolchain in toolchains:
                build_envs = self.create_build_envs( toolchain, cuppa_env )
                for build_env in build_envs:
                    for sconscript in sconscripts:
                        decider = cuppa_env.get_option( 'decider' )
                        if decider:
                            build_env['env'].Decider( decider )
                        self.call_project_sconscript_files( toolchain, build_env['variant'], build_env['target_arch'], build_env['env'], sconscript )

        else:
            logger.warn( "No projects to build. Nothing to be done" )
Ejemplo n.º 38
0
    def __call__(self, env, target, source, libraries, linktype):
        """Build the requested Boost libraries with bjam and install them.

        Registers a BoostLibraryBuilder (action + emitter) for this variant,
        builds bjam itself on demand, records each built library node per
        variant in self._library_sources so later calls can reuse instead of
        rebuild, and finally copies the built library files into the install
        directory selected by *linktype*.

        :param env:       SCons construction environment for the current variant.
        :param target:    Node(s) that depend on the Boost libraries.
        :param source:    Source node(s) handed to the BoostLibraryBuilder.
        :param libraries: Boost library names requested by the caller.
        :param linktype:  'shared' or 'static' - also selects the install dir.
        :returns:         Flattened list of the installed library nodes.
        """

        variant = variant_name(env['variant'].name())
        target_arch = env['target_arch']
        toolchain = env['toolchain']
        stage_dir = stage_directory(toolchain, variant, target_arch,
                                    toolchain.abi_flag(env))

        library_action = BoostLibraryAction(env, stage_dir, libraries,
                                            self._add_dependents, linktype,
                                            self._boost, self._verbose_build,
                                            self._verbose_config)
        library_emitter = BoostLibraryEmitter(env, stage_dir, libraries,
                                              self._add_dependents, linktype,
                                              self._boost)

        logger.debug("env = [{}]".format(as_info(env['build_dir'])))

        env.AppendUnique(
            BUILDERS={
                'BoostLibraryBuilder':
                env.Builder(action=library_action, emitter=library_emitter)
            })

        # bjam itself is built on demand from the Boost source tree and is
        # never cleaned as other variants may still need it.
        bjam_exe = 'bjam'
        if platform.system() == "Windows":
            bjam_exe += ".exe"
        bjam_target = os.path.join(self._boost.local(), bjam_exe)
        bjam = env.Command(bjam_target, [], BuildBjam(self._boost))
        env.NoClean(bjam)

        built_libraries = env.BoostLibraryBuilder(target, source)

        built_library_map = {}
        for library in built_libraries:
            # Extract the library name from the library filename.
            # Possibly use regex instead?
            name = os.path.split(str(library))[1]
            name = name.split(".")[0]
            name = name.split("-")[0]
            name = "_".join(name.split("_")[1:])

            built_library_map[name] = library

        logger.trace("Built Library Map = [{}]".format(
            colour_items(built_library_map.keys())))

        # The stage directory uniquely identifies a (toolchain, variant,
        # target_arch, abi) combination, so it doubles as the variant key.
        variant_key = stage_dir

        logger.debug("Source Libraries Variant Key = [{}]".format(
            as_notice(variant_key)))

        if variant_key not in self._library_sources:
            self._library_sources[variant_key] = {}

        logger.debug("Variant sources = [{}]".format(
            colour_items(self._library_sources[variant_key].keys())))

        # Expand the requested libraries with their Boost dependencies.
        required_libraries = add_dependent_libraries(self._boost, linktype,
                                                     libraries)

        logger.debug("Required libraries = [{}]".format(
            colour_items(required_libraries)))

        for library in required_libraries:
            if library in self._library_sources[variant_key]:

                logger.debug(
                    "Library [{}] already present in variant [{}]".format(
                        as_notice(library), as_info(variant_key)))

                # Depends() is still needed so SCons knows about the
                # relationship even though the node is reused from the cache.
                if library not in built_library_map:
                    logger.debug("Add Depends for [{}]".format(
                        as_notice(
                            self._library_sources[variant_key][library].path)))
                    env.Depends(built_libraries,
                                self._library_sources[variant_key][library])
            else:
                self._library_sources[variant_key][
                    library] = built_library_map[library]

        # Fixed: use .items() instead of the Python-2-only .iteritems() so
        # this also runs under Python 3 (matches the newer builder code).
        logger.debug("Library sources for variant [{}] = [{}]".format(
            as_info(variant_key),
            colour_items(
                k + ":" + as_info(v.path)
                for k, v in self._library_sources[variant_key].items())))

        if built_libraries:

            env.Requires(built_libraries, bjam)

            if cuppa.build_platform.name() == "Linux":

                # On Linux a toolset configuration jam file is generated so
                # bjam picks up the same compiler as the active toolchain.
                toolset_target = os.path.join(
                    self._boost.local(), env['toolchain'].name() + "._jam")
                toolset_config_jam = env.Command(toolset_target, [],
                                                 WriteToolsetConfigJam())

                project_config_target = os.path.join(self._boost.local(),
                                                     "project-config.jam")
                if not os.path.exists(project_config_target):
                    project_config_jam = env.Requires(
                        project_config_target,
                        env.AlwaysBuild(toolset_config_jam))
                    env.Requires(built_libraries, project_config_jam)

                env.Requires(built_libraries, toolset_config_jam)

        # Shared libraries must end up next to the executables (final dir);
        # static libraries only need to be visible to the linker (build dir).
        install_dir = env['abs_build_dir']

        if linktype == 'shared':
            install_dir = env['abs_final_dir']

        installed_libraries = []

        for library in required_libraries:

            logger.debug("Install Boost library [{}:{}] to [{}]".format(
                as_notice(library),
                as_info(str(self._library_sources[variant_key][library])),
                as_notice(install_dir)))

            library_node = self._library_sources[variant_key][library]

            logger.trace("Library Node = \n[{}]\n[{}]\n[{}]\n[{}]".format(
                as_notice(library_node.path), as_notice(str(library_node)),
                as_notice(str(library_node.get_binfo().bact)),
                as_notice(str(library_node.get_state()))))

            installed_library = env.CopyFiles(
                install_dir, self._library_sources[variant_key][library])

            installed_libraries.append(installed_library)

        logger.debug("Boost 'Installed' Libraries = [{}]".format(
            colour_items(l.path for l in Flatten(installed_libraries))))

        return Flatten(installed_libraries)
Ejemplo n.º 39
0
    def __init__(self,
                 sconstruct_path,
                 base_path=os.path.abspath('.'),
                 branch_root=None,
                 default_options={},
                 default_projects=[],
                 default_variants=[],
                 default_dependencies=[],
                 default_profiles=[],
                 dependencies=[],
                 profiles=[],
                 default_runner=None,
                 configure_callback=None,
                 tools=[]):

        cuppa.core.base_options.set_base_options()

        cuppa_env = cuppa.core.environment.CuppaEnvironment()
        cuppa_env.add_tools(tools)

        dependencies, default_dependencies, dependencies_warning = self._normalise_with_defaults(
            dependencies, default_dependencies, "dependencies")
        profiles, default_profiles, profiles_warning = self._normalise_with_defaults(
            profiles, default_profiles, "profiles")

        self.initialise_options(cuppa_env, default_options, profiles,
                                dependencies)
        cuppa_env['configured_options'] = {}
        self._configure = cuppa.configure.Configure(
            cuppa_env, callback=configure_callback)

        enable_thirdparty_logging(
            cuppa_env.get_option('enable-thirdparty-logging') and True
            or False)
        self._set_verbosity_level(cuppa_env)

        cuppa_env['sconstruct_path'] = sconstruct_path
        cuppa_env['sconstruct_dir'], cuppa_env[
            'sconstruct_file'] = os.path.split(sconstruct_path)

        self._set_output_format(cuppa_env)

        self._configure.load()

        cuppa_env['offline'] = cuppa_env.get_option('offline')

        cuppa.version.check_current_version(cuppa_env['offline'])

        if cuppa_env['offline']:
            logger.info(as_info_label("Running in OFFLINE mode"))

        logger.info("using sconstruct file [{}]".format(
            as_notice(cuppa_env['sconstruct_file'])))

        if dependencies_warning:
            logger.warn(dependencies_warning)

        if profiles_warning:
            logger.warn(profiles_warning)

        help = cuppa_env.get_option('help') and True or False

        cuppa_env['minimal_output'] = cuppa_env.get_option('minimal_output')
        cuppa_env['ignore_duplicates'] = cuppa_env.get_option(
            'ignore_duplicates')

        cuppa_env['working_dir'] = os.getcwd()
        cuppa_env['launch_dir'] = os.path.relpath(SCons.Script.GetLaunchDir(),
                                                  cuppa_env['working_dir'])
        cuppa_env['run_from_launch_dir'] = cuppa_env['launch_dir'] == "."

        cuppa_env['launch_offset_dir'] = "."

        if not cuppa_env['run_from_launch_dir']:
            levels = len(cuppa_env['launch_dir'].split(os.path.sep))
            cuppa_env['launch_offset_dir'] = os.path.sep.join(
                ['..' for i in range(levels)])

        cuppa_env['base_path'] = os.path.normpath(
            os.path.expanduser(base_path))
        cuppa_env['branch_root'] = branch_root and os.path.normpath(
            os.path.expanduser(branch_root)) or base_path
        cuppa_env['branch_dir'] = cuppa_env['branch_root'] and os.path.relpath(
            cuppa_env['base_path'], cuppa_env['branch_root']) or None

        thirdparty = cuppa_env.get_option('thirdparty')
        if thirdparty:
            thirdparty = os.path.normpath(os.path.expanduser(thirdparty))

        cuppa_env['thirdparty'] = thirdparty

        cuppa.core.storage_options.process_storage_options(cuppa_env)
        cuppa.core.location_options.process_location_options(cuppa_env)

        cuppa_env['current_branch'] = ''
        cuppa_env['current_revision'] = ''
        if not help and not self._configure.handle_conf_only():
            if cuppa_env['location_match_current_branch']:
                url, repo, branch, remote, rev = cuppa.scms.scms.get_current_rev_info(
                    cuppa_env['sconstruct_dir'])
                if branch:
                    cuppa_env['current_branch'] = branch
                if rev:
                    cuppa_env['current_revision'] = rev
                logger.info(
                    "Current build on branch [{}] at revision [{}] from remote [{}] in [{}] at [{}]"
                    .format(as_info(str(branch)), as_info(str(rev)),
                            as_info(str(remote)), as_info(str(repo)),
                            as_info(str(url))))

        cuppa_env['default_projects'] = default_projects
        cuppa_env['default_variants'] = default_variants and set(
            default_variants) or set()
        cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
        cuppa_env['BUILD_WITH'] = cuppa_env['default_dependencies']
        cuppa_env['dependencies'] = {}
        cuppa_env[
            'default_profiles'] = default_profiles and default_profiles or []
        cuppa_env['BUILD_PROFILE'] = cuppa_env['default_profiles']
        cuppa_env['profiles'] = {}

        test_runner = cuppa_env.get_option(
            'runner', default=default_runner and default_runner or 'process')
        cuppa_env['default_runner'] = test_runner

        cuppa_env['propagate_env'] = cuppa_env.get_option(
            'propagate-env') and True or False
        cuppa_env['propagate_path'] = cuppa_env.get_option(
            'propagate-path') and True or False
        cuppa_env['merge_path'] = cuppa_env.get_option(
            'merge-path') and True or False
        cuppa_env['show_test_output'] = cuppa_env.get_option(
            'show-test-output') and True or False
        cuppa_env['suppress_process_output'] = cuppa_env.get_option(
            'suppress-process-output') and True or False
        cuppa_env['dump'] = cuppa_env.get_option('dump') and True or False
        cuppa_env['clean'] = cuppa_env.get_option('clean') and True or False

        self.add_variants(cuppa_env)
        self.add_toolchains(cuppa_env)
        self.add_platforms(cuppa_env)

        cuppa_env['platform'] = cuppa.build_platform.Platform.current()

        toolchains = cuppa_env.get_option('toolchains')
        cuppa_env['target_architectures'] = None

        if not help and not self._configure.handle_conf_only():
            default_toolchain = cuppa_env['platform'].default_toolchain()

            if not toolchains:
                toolchains = [
                    cuppa_env[self.toolchains_key][default_toolchain]
                ]
            else:
                toolchains = [
                    cuppa_env[self.toolchains_key][t] for t in toolchains
                ]

            cuppa_env['active_toolchains'] = toolchains

            def add_profile(name, profile):
                cuppa_env['profiles'][name] = profile

            def add_dependency(name, dependency):
                cuppa_env['dependencies'][name] = dependency

            cuppa.modules.registration.get_options("methods", cuppa_env)

            if not help and not self._configure.handle_conf_only():
                cuppa_env[self.project_generators_key] = {}
                cuppa.modules.registration.add_to_env("dependencies",
                                                      cuppa_env,
                                                      add_dependency)
                cuppa.modules.registration.add_to_env("profiles", cuppa_env,
                                                      add_profile)
                cuppa.modules.registration.add_to_env("methods", cuppa_env)
                cuppa.modules.registration.add_to_env("project_generators",
                                                      cuppa_env)

                for method_plugin in pkg_resources.iter_entry_points(
                        group='cuppa.method.plugins', name=None):
                    method_plugin.load().add_to_env(cuppa_env)

                for profile_plugin in pkg_resources.iter_entry_points(
                        group='cuppa.profile.plugins', name=None):
                    profile_plugin.load().add_to_env(cuppa_env)

                if profiles:
                    for profile in profiles:
                        profile.add_to_env(cuppa_env, add_profile)

                logger.trace("available profiles are [{}]".format(
                    colour_items(sorted(cuppa_env["profiles"].keys()))))

                logger.info("default profiles are [{}]".format(
                    colour_items(sorted(cuppa_env["default_profiles"]),
                                 as_info)))

                for dependency_plugin in pkg_resources.iter_entry_points(
                        group='cuppa.dependency.plugins', name=None):
                    dependency_plugin.load().add_to_env(
                        cuppa_env, add_dependency)

                if dependencies:
                    for dependency in dependencies:
                        dependency.add_to_env(cuppa_env, add_dependency)

                logger.trace("available dependencies are [{}]".format(
                    colour_items(sorted(cuppa_env["dependencies"].keys()))))

                logger.info("default dependencies are [{}]".format(
                    colour_items(sorted(cuppa_env["default_dependencies"]),
                                 as_info)))

            # TODO - default_profile

            if cuppa_env['dump']:
                logger.info(
                    as_info_label(
                        "Running in DUMP mode, no building will be attempted"))
                cuppa_env.dump()

            job_count = cuppa_env.get_option('num_jobs')
            parallel = cuppa_env.get_option('parallel')
            parallel_mode = "manually"

            if job_count == 1 and parallel:
                job_count = multiprocessing.cpu_count()
                if job_count > 1:
                    SCons.Script.SetOption('num_jobs', job_count)
                    parallel_mode = "automatically"
            cuppa_env['job_count'] = job_count
            cuppa_env['parallel'] = parallel
            if job_count > 1:
                logger.info(
                    "Running in {} with option [{}] set {} as [{}]".format(
                        as_emphasised("parallel mode"), as_info("jobs"),
                        as_emphasised(parallel_mode),
                        as_info(str(SCons.Script.GetOption('num_jobs')))))

        if not help and self._configure.handle_conf_only():
            self._configure.save()

        if not help and not self._configure.handle_conf_only():
            self.build(cuppa_env)

        if self._configure.handle_conf_only():
            print(
                "cuppa: Handling configuration only, so no builds will be attempted."
            )
            print(
                "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
            )
            print("")
            print("scons -D {}".format(
                self._command_line_from_settings(
                    cuppa_env['configured_options'])))
            print("")
            print("cuppa: Nothing to be done. Exiting.")
            SCons.Script.Exit()
Ejemplo n.º 40
0
    def __init__( self,
                  sconstruct_path,
                  base_path            = os.path.abspath( '.' ),
                  branch_root          = None,
                  default_options      = {},
                  default_projects     = [],
                  default_variants     = [],
                  default_dependencies = [],
                  default_profiles     = [],
                  dependencies         = [],
                  profiles             = [],
                  default_runner       = None,
                  configure_callback   = None,
                  tools                = [] ):
        """Bootstrap a cuppa build session from a sconstruct file.

        Builds up a ``CuppaEnvironment`` step by step (options, paths,
        dependencies, profiles, toolchains), then either saves configuration
        (``--conf-only`` style runs), performs the build, or prints the
        equivalent ``scons -D`` command line.

        :param sconstruct_path:      absolute path of the driving sconstruct file.
        :param base_path:            root path of the project being built.
        :param branch_root:          optional root above base_path used to derive a branch dir.
        :param default_options:      initial option values passed to initialise_options().
        :param default_projects:     projects built when none are named on the command line.
        :param default_variants:     variant names active by default.
        :param default_dependencies: dependencies applied to every build (BUILD_WITH).
        :param default_profiles:     profiles applied to every build (BUILD_PROFILE).
        :param dependencies:         dependency factories to register.
        :param profiles:             profile factories to register.
        :param default_runner:       default test runner name (falls back to 'process').
        :param configure_callback:   callback forwarded to cuppa.configure.Configure.
        :param tools:                extra SCons tools added to the environment.

        NOTE(review): the mutable default arguments ({} / []) are shared across
        calls — confirm none are mutated in place by the callees below.
        """

        # Register the base command-line options before anything reads them.
        cuppa.core.base_options.set_base_options()

        cuppa_env = cuppa.core.environment.CuppaEnvironment()
        cuppa_env.add_tools( tools )

        # Normalise explicit vs default dependency/profile lists; a non-empty
        # warning string signals a conflict to report after logging is set up.
        dependencies, default_dependencies, dependencies_warning = self._normalise_with_defaults( dependencies, default_dependencies, "dependencies" )
        profiles, default_profiles, profiles_warning = self._normalise_with_defaults( profiles, default_profiles, "profiles" )

        self.initialise_options( cuppa_env, default_options, profiles, dependencies )
        cuppa_env['configured_options'] = {}
        self._configure = cuppa.configure.Configure( cuppa_env, callback=configure_callback )

        # "and True or False" coerces the option value to a strict bool.
        enable_thirdparty_logging( cuppa_env.get_option( 'enable-thirdparty-logging' ) and True or False )
        self._set_verbosity_level( cuppa_env )

        cuppa_env['sconstruct_path'] = sconstruct_path
        cuppa_env['sconstruct_dir'], cuppa_env['sconstruct_file'] = os.path.split(sconstruct_path)

        self._set_output_format( cuppa_env )

        # Load any previously saved configuration before reading options below.
        self._configure.load()

        cuppa_env['offline'] = cuppa_env.get_option( 'offline' )

        cuppa.version.check_current_version( cuppa_env['offline'] )

        if cuppa_env['offline']:
            logger.info( as_info_label( "Running in OFFLINE mode" ) )

        logger.info( "using sconstruct file [{}]".format( as_notice( cuppa_env['sconstruct_file'] ) ) )

        # Report normalisation conflicts now that the logger is configured.
        if dependencies_warning:
            logger.warn( dependencies_warning )

        if profiles_warning:
            logger.warn( profiles_warning )

        # NOTE(review): shadows the builtin help(); kept as-is for compatibility.
        help = cuppa_env.get_option( 'help' ) and True or False

        cuppa_env['minimal_output']       = cuppa_env.get_option( 'minimal_output' )
        cuppa_env['ignore_duplicates']    = cuppa_env.get_option( 'ignore_duplicates' )

        # Record where we run from relative to where scons was launched so
        # paths can be presented relative to the user's shell location.
        cuppa_env['working_dir']          = os.getcwd()
        cuppa_env['launch_dir']           = os.path.relpath( SCons.Script.GetLaunchDir(), cuppa_env['working_dir'] )
        cuppa_env['run_from_launch_dir']  = cuppa_env['launch_dir'] == "."

        cuppa_env['launch_offset_dir']    = "."

        if not cuppa_env['run_from_launch_dir']:
            # One ".." per path component needed to climb back to launch_dir.
            levels = len( cuppa_env['launch_dir'].split( os.path.sep ) )
            cuppa_env['launch_offset_dir'] = os.path.sep.join( ['..' for i in range(levels)] )

        cuppa_env['base_path']   = os.path.normpath( os.path.expanduser( base_path ) )
        cuppa_env['branch_root'] = branch_root and os.path.normpath( os.path.expanduser( branch_root ) ) or base_path
        cuppa_env['branch_dir']  = cuppa_env['branch_root'] and os.path.relpath( cuppa_env['base_path'], cuppa_env['branch_root'] ) or None

        thirdparty = cuppa_env.get_option( 'thirdparty' )
        if thirdparty:
            thirdparty = os.path.normpath( os.path.expanduser( thirdparty ) )

        cuppa_env['thirdparty'] = thirdparty

        cuppa.core.storage_options.process_storage_options( cuppa_env )
        cuppa.core.location_options.process_location_options( cuppa_env )

        # Seed the defaults for projects, variants, dependencies and profiles.
        # BUILD_WITH / BUILD_PROFILE alias the default lists for SConscripts.
        cuppa_env['default_projects']     = default_projects
        cuppa_env['default_variants']     = default_variants and set( default_variants ) or set()
        cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
        cuppa_env['BUILD_WITH']           = cuppa_env['default_dependencies']
        cuppa_env['dependencies']         = {}
        cuppa_env['default_profiles']     = default_profiles and default_profiles or []
        cuppa_env['BUILD_PROFILE']        = cuppa_env['default_profiles']
        cuppa_env['profiles']             = {}

        test_runner = cuppa_env.get_option( 'runner', default=default_runner and default_runner or 'process' )
        cuppa_env['default_runner']  = test_runner

        # Coerce each flag-style option to a strict bool.
        cuppa_env['propagate_env']       = cuppa_env.get_option( 'propagate-env' )       and True or False
        cuppa_env['propagate_path']      = cuppa_env.get_option( 'propagate-path' )      and True or False
        cuppa_env['merge_path']          = cuppa_env.get_option( 'merge-path' )          and True or False
        cuppa_env['show_test_output']    = cuppa_env.get_option( 'show-test-output' )    and True or False
        cuppa_env['suppress_process_output'] = cuppa_env.get_option( 'suppress-process-output' ) and True or False
        cuppa_env['dump']                = cuppa_env.get_option( 'dump' )                and True or False
        cuppa_env['clean']               = cuppa_env.get_option( 'clean' )               and True or False

        self.add_variants   ( cuppa_env )
        self.add_toolchains ( cuppa_env )
        self.add_platforms  ( cuppa_env )

        cuppa_env['platform'] = cuppa.build_platform.Platform.current()

        toolchains = cuppa_env.get_option( 'toolchains' )
        cuppa_env[ 'target_architectures' ] = None

        # Full setup (toolchains, plugins, job count) only happens for a real
        # build run — not for --help or configure-only invocations.
        if not help and not self._configure.handle_conf_only():
            default_toolchain = cuppa_env['platform'].default_toolchain()

            if not toolchains:
                toolchains = [ cuppa_env[self.toolchains_key][default_toolchain] ]
            else:
                toolchains = [ cuppa_env[self.toolchains_key][t] for t in toolchains ]

            cuppa_env['active_toolchains'] = toolchains

            # Registration callbacks handed to modules and plugins below.
            def add_profile( name, profile ):
                cuppa_env['profiles'][name] = profile

            def add_dependency( name, dependency ):
                cuppa_env['dependencies'][name] = dependency

            cuppa.modules.registration.get_options( "methods", cuppa_env )

            # NOTE(review): this condition duplicates the enclosing if — it is
            # always true here; confirm whether an intermediate check was lost.
            if not help and not self._configure.handle_conf_only():
                cuppa_env[self.project_generators_key] = {}
                cuppa.modules.registration.add_to_env( "dependencies",       cuppa_env, add_dependency )
                cuppa.modules.registration.add_to_env( "profiles",           cuppa_env, add_profile )
                cuppa.modules.registration.add_to_env( "methods",            cuppa_env )
                cuppa.modules.registration.add_to_env( "project_generators", cuppa_env )

                # Pull in externally installed plugins via setuptools entry points.
                for method_plugin in pkg_resources.iter_entry_points( group='cuppa.method.plugins', name=None ):
                    method_plugin.load().add_to_env( cuppa_env )

                for profile_plugin in pkg_resources.iter_entry_points( group='cuppa.profile.plugins', name=None ):
                    profile_plugin.load().add_to_env( cuppa_env )

                if profiles:
                    for profile in profiles:
                        profile.add_to_env( cuppa_env, add_profile )

                logger.trace( "available profiles are [{}]".format(
                        colour_items( sorted( cuppa_env["profiles"].keys() ) )
                ) )

                logger.info( "default profiles are [{}]".format(
                        colour_items( sorted( cuppa_env["default_profiles"] ), as_info )
                ) )

                for dependency_plugin in pkg_resources.iter_entry_points( group='cuppa.dependency.plugins', name=None ):
                    dependency_plugin.load().add_to_env( cuppa_env, add_dependency )

                if dependencies:
                    for dependency in dependencies:
                        dependency.add_to_env( cuppa_env, add_dependency )


                logger.trace( "available dependencies are [{}]".format(
                        colour_items( sorted( cuppa_env["dependencies"].keys() ) )
                ) )

                logger.info( "default dependencies are [{}]".format(
                        colour_items( sorted( cuppa_env["default_dependencies"] ), as_info )
                ) )


            # TODO - default_profile

            if cuppa_env['dump']:
                logger.info( as_info_label( "Running in DUMP mode, no building will be attempted" ) )
                cuppa_env.dump()

            # Auto-parallelise: with --parallel and the default single job,
            # scale the SCons job count to the machine's CPU count.
            job_count = cuppa_env.get_option( 'num_jobs' )
            parallel  = cuppa_env.get_option( 'parallel' )
            parallel_mode = "manually"

            if job_count==1 and parallel:
                job_count = multiprocessing.cpu_count()
                if job_count > 1:
                    SCons.Script.SetOption( 'num_jobs', job_count )
                    parallel_mode = "automatically"
            cuppa_env['job_count'] = job_count
            cuppa_env['parallel']  = parallel
            if job_count>1:
                logger.debug( "Running in {} with option [{}] set {} as [{}]".format(
                        as_emphasised("parallel mode"),
                        as_info( "jobs" ),
                        as_emphasised(parallel_mode),
                        as_info( str( SCons.Script.GetOption( 'num_jobs') ) )
                ) )

        # Dispatch: save configuration, run the build, or explain and exit.
        if not help and self._configure.handle_conf_only():
            self._configure.save()

        if not help and not self._configure.handle_conf_only():
            self.build( cuppa_env )

        if self._configure.handle_conf_only():
            print "cuppa: Handling configuration only, so no builds will be attempted."
            print "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
            print ""
            print "scons -D {}".format( self._command_line_from_settings( cuppa_env['configured_options'] ) )
            print ""
            print "cuppa: Nothing to be done. Exiting."
            SCons.Script.Exit()
Ejemplo n.º 41
0
    def create_build_envs(self, toolchain, cuppa_env):
        """Create one build environment per (active variant, target architecture).

        Resolves which variants/actions are active (explicit options, then
        configured defaults, then toolchain defaults), asks *toolchain* to
        build a concrete SCons env for each combination, optionally propagates
        or merges the external PATH/environment into it, and returns a list of
        dicts with keys 'variant', 'target_arch', 'abi' and 'env'.
        """

        propagate_environment = cuppa_env['propagate_env']
        propagate_path = cuppa_env['propagate_path']
        merge_path = cuppa_env['merge_path']

        variants = cuppa_env[self.variants_key]
        actions = cuppa_env[self.actions_key]

        target_architectures = cuppa_env['target_architectures']

        # A single None entry means "toolchain decides the architecture".
        if not target_architectures:
            target_architectures = [None]

        # Tasks (variants/actions) explicitly switched on via command-line options.
        def get_active_from_options(tasks):
            active_tasks = {}
            for key, task in tasks.items():
                if cuppa_env.get_option(task.name()):
                    active_tasks[task.name()] = task
            return active_tasks

        active_variants = get_active_from_options(variants)
        active_actions = get_active_from_options(actions)

        # Subset of *tasks* whose names appear in *default_tasks*.
        def get_active_from_defaults(default_tasks, tasks):
            active_tasks = {}
            for task in default_tasks:
                if task in tasks.keys():
                    active_tasks[task] = tasks[task]
            return active_tasks

        # Nothing requested explicitly: fall back to configured defaults.
        if not active_variants and not active_actions:
            default_variants = cuppa_env[
                'default_variants'] or toolchain.default_variants()
            if default_variants:
                active_variants = get_active_from_defaults(
                    default_variants, variants)
                active_actions = get_active_from_defaults(
                    default_variants, actions)
                if active_variants:
                    logger.info(
                        "Default build variants of [{}] being used.".format(
                            colour_items(active_variants, as_info)))
                if active_actions:
                    logger.info(
                        "Default build actions of [{}] being used.".format(
                            colour_items(active_actions, as_info)))

        # Still no variants (e.g. only actions were requested): use the
        # toolchain's own defaults so there is always something to build.
        if not active_variants:
            active_variants = get_active_from_defaults(
                toolchain.default_variants(), variants)
            logger.info(
                "No active variants specified so toolchain defaults of [{}] being used."
                .format(colour_items(active_variants, as_info)))

        logger.debug("Using active_variants = [{}]".format(
            colour_items(active_variants, as_info)))
        logger.debug("Using active_actions = [{}]".format(
            colour_items(active_actions, as_info)))

        build_envs = []

        for key, variant in active_variants.items():

            for target_arch in target_architectures:

                # make_env may refine the requested arch (returned alongside env).
                env, target_arch = toolchain.make_env(cuppa_env, variant,
                                                      target_arch)

                if env:

                    # TODO: Refactor this code out
                    if propagate_environment or propagate_path or merge_path:

                        # Concatenate both path lists, dropping later duplicates
                        # while preserving first-occurrence order.
                        def merge_paths(default_paths, env_paths):
                            path_set = set(default_paths + env_paths)

                            def record_path(path):
                                path_set.discard(path)
                                return path

                            return [
                                record_path(p)
                                for p in default_paths + env_paths
                                if p in path_set
                            ]

                        # Split an environment's PATH into components ([] if unset).
                        def get_paths_from(environment):
                            return 'PATH' in environment and environment[
                                'PATH'].split(os.pathsep) or []

                        default_paths = get_paths_from(env['ENV'])
                        env_paths = get_paths_from(os.environ)
                        if propagate_environment:
                            env['ENV'] = os.environ.copy()
                            logger.debug(
                                "propagating environment for [{}:{}] to all subprocesses: [{}]"
                                .format(variant.name(), target_arch,
                                        as_notice(str(env['ENV']))))
                        if propagate_path and not propagate_environment:
                            # NOTE(review): assigns the *list* of components here,
                            # whereas the merge branch joins with os.pathsep —
                            # confirm downstream accepts a list-valued PATH.
                            env['ENV']['PATH'] = env_paths
                            logger.debug(
                                "propagating PATH for [{}:{}] to all subprocesses: [{}]"
                                .format(variant.name(), target_arch,
                                        colour_items(env_paths)))
                        elif merge_path:
                            merged_paths = merge_paths(default_paths,
                                                       env_paths)
                            env['ENV']['PATH'] = os.pathsep.join(merged_paths)
                            logger.debug(
                                "merging PATH for [{}:{}] to all subprocesses: [{}]"
                                .format(variant.name(), target_arch,
                                        colour_items(merged_paths)))

                    build_envs.append({
                        'variant': key,
                        'target_arch': target_arch,
                        'abi': toolchain.abi(env),
                        'env': env
                    })

                    # Wrap build output unless the user asked for raw output.
                    if not cuppa_env['raw_output']:
                        cuppa.output_processor.Processor.install(env)

                    env['toolchain'] = toolchain
                    env['variant'] = variant
                    env['target_arch'] = target_arch
                    env['abi'] = toolchain.abi(env)
                    env['variant_actions'] = self.get_active_actions(
                        cuppa_env, variant, active_variants, active_actions)

        return build_envs
Ejemplo n.º 42
0
Archivo: git.py Proyecto: ja11sop/cuppa
    def get_branch( cls, path ):
        """Work out the branch (or tag) name and remote ref for the git checkout at *path*.

        Parses the ref decoration printed by ``git show`` so that it also works
        for a detached HEAD. Returns a ``(branch, remote)`` pair; either element
        may be ``None`` when it cannot be determined.
        """
        # "git show -s --pretty=%d HEAD" prints the decoration, e.g.
        # "(HEAD -> master, origin/master)" — usable even with a detached head.
        command = "{git} show -s --pretty=\%d HEAD".format( git=cls.binary() )
        result = cls.execute_command( command, path )

        decoration = re.search( r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result )
        if not decoration:
            logger.warn( "No branch found from [{}]".format( result ) )
            return None, None

        refs = [ entry.strip() for entry in decoration.group("branches").split(',') ]
        logger.trace( "Branches (using show) for [{}] are [{}]".format( as_notice(path), colour_items(refs) ) )

        if len(refs) == 1:
            only_ref = refs[0]
            # A lone "tag: tag_name" entry carries no remote; rewriting ": " as
            # "/" lets the same split extract either the tag or branch name.
            remote = None if only_ref.startswith('tag:') else only_ref
            branch = only_ref.replace(': ','/').split('/')[1]
        else:
            remote = refs[-2]
            branch = remote.split('/')[1]

        logger.trace( "Branch (using show) for [{}] is [{}]".format( as_notice(path), as_info(branch) ) )
        return branch, remote
Ejemplo n.º 43
0
    def on_progress(cls, progress, sconscript, variant, env, target, source):
        """At the end of the sconstruct run, collate per-test JSON reports into a
        master JSON summary and a rendered HTML index for each destination dir.

        Only acts when *progress* == 'sconstruct_end'; all other progress
        events are ignored.
        """
        if progress == 'sconstruct_end':

            logger.trace("Destination dirs = [{}]".format(
                colour_items(cls.destination_dirs.keys())))
            logger.trace("cls.all_reports dirs = [{}]".format(
                colour_items(cls.all_reports.keys())))

            for destination_dir, final_dirs in six.iteritems(
                    cls.destination_dirs):

                # Output paths for the collated HTML index and JSON summary.
                master_index_path = os.path.join(destination_dir,
                                                 "test-report-index.html")
                master_report_path = os.path.join(destination_dir,
                                                  "test-report-index.json")

                logger.debug("Master test report index path = [{}]".format(
                    as_notice(master_index_path)))

                template = cls.get_template()

                # Top-level summary: VCS info plus per-report collations.
                summaries = {}
                summaries['vcs_info'] = initialise_test_linking(
                    env, link_style="raw")
                url, repository, branch, remote, revision = summaries[
                    'vcs_info']
                summaries['name'] = str(env.Dir(destination_dir)) + "/*"
                summaries['title'] = url and url or env['sconstruct_dir']
                summaries['branch'] = branch and branch or "None"
                summaries['commit'] = remote and remote or "None"
                summaries['uri'] = url and url or "Local"
                summaries['toolchain_variants'] = {}
                summaries['reports'] = {}

                for report_dir, json_reports in six.iteritems(cls.all_reports):
                    common, tail1, tail2 = cuppa.path.split_common(
                        report_dir, destination_dir)
                    logger.trace("common, tail1, tail2 = {}, {}, {}".format(
                        as_info(common), as_notice(tail1), as_notice(tail2)))
                    # Only collate reports that live under (or above) this
                    # destination dir — i.e. one path is a prefix of the other.
                    if common and (not tail1 or not tail2):

                        for json_report in json_reports:

                            summary = CollateReportIndexAction._read(
                                str(json_report))

                            toolchain_variant = summary[
                                'toolchain_variant_dir']

                            cls._update_toolchain_variant_summary(
                                summaries, toolchain_variant, summary)

                            summary_name = summary['name']

                            # Group each report's summaries by name, then by
                            # toolchain variant.
                            if not summary_name in summaries['reports']:
                                summaries['reports'][summary_name] = {}
                                summaries['reports'][summary_name][
                                    'variants'] = {}

                            summaries['reports'][summary_name]['variants'][
                                toolchain_variant] = summary

                report_list = summaries['reports'].items()
                report_list = sorted(report_list)

                # Derive per-report rollups: worst status across variants,
                # variant count, and presentation attributes for the template.
                for name, report in report_list:
                    report['default_variant'] = None
                    report['default_summary_rel_path'] = None
                    variant_count = 0
                    status_rank = 0
                    for variant in six.itervalues(report['variants']):
                        variant_count += 1
                        # Higher index in _ranked_status() == worse status.
                        index = cls._ranked_status().index(variant['status'])
                        if index > status_rank:
                            status_rank = index
                        # First variant seen becomes the report's default link.
                        if not report['default_variant']:
                            report['default_variant'] = variant[
                                'toolchain_variant_dir']
                            report['default_summary_rel_path'] = variant[
                                'summary_rel_path']

                    report['variant_count'] = variant_count
                    report['status'] = cls._ranked_status()[status_rank]
                    report[
                        'selector'] = GenerateHtmlReportBuilder._selector_from_name(
                            name)
                    report[
                        'style'] = GenerateHtmlReportBuilder._status_bootstrap_style(
                            report['status'])
                    report[
                        'text_colour'] = GenerateHtmlReportBuilder._status_bootstrap_text_colour(
                            report['status'])

                summaries_json_report = json.dumps(summaries,
                                                   sort_keys=True,
                                                   indent=4,
                                                   separators=(',', ': '))

                logger.trace("summaries = \n{}".format(summaries_json_report))

                # Persist the machine-readable summary...
                with open(master_report_path, 'w') as master_report_file:
                    master_report_file.write(summaries_json_report)

                # ...then render and persist the human-readable index page.
                templateRendered = template.render(summaries=summaries,
                                                   report_list=report_list,
                                                   next=next,
                                                   len=len)

                with open(master_index_path, 'w') as master_index_file:
                    master_index_file.write(encode(templateRendered))
Ejemplo n.º 44
0
    def on_progress( cls, progress, sconscript, variant, env, target, source ):
        # Build-progress callback. Only the final 'sconstruct_end' event
        # triggers generation of the master test-report index files (one
        # HTML page + one JSON summary per registered destination dir).
        if progress != 'sconstruct_end':
            return

        logger.trace( "Destination dirs = [{}]".format( colour_items( cls.destination_dirs.keys() ) ) )
        logger.trace( "cls.all_reports dirs = [{}]".format( colour_items( cls.all_reports.keys() ) ) )

        # NOTE: .items()/.values() replace the Python-2-only .iteritems()/
        # .itervalues() used previously — identical iteration results on
        # Python 2, and forward-compatible with Python 3.
        for destination_dir, final_dirs in cls.destination_dirs.items():

            master_index_path = os.path.join( destination_dir, "test-report-index.html" )
            master_report_path = os.path.join( destination_dir, "test-report-index.json" )

            logger.debug( "Master test report index path = [{}]".format( as_notice( master_index_path ) ) )

            template = cls.get_template()

            # Top-level summary metadata shared by every report collated
            # under this destination directory.
            summaries = {}
            summaries['vcs_info'] = initialise_test_linking( env, link_style="raw" )
            summaries['name'] = str(env.Dir(destination_dir)) + "/*"
            summaries['title'] = summaries['vcs_info'][0]
            summaries['branch'] = summaries['vcs_info'][2]
            summaries['commit'] = summaries['vcs_info'][4]
            summaries['uri'] = summaries['vcs_info'][0]
            summaries['toolchain_variants'] = {}
            summaries['reports'] = {}

            for report_dir, json_reports in cls.all_reports.items():
                common, tail1, tail2 = cuppa.path.split_common( report_dir, destination_dir )
                logger.trace( "common, tail1, tail2 = {}, {}, {}".format( as_info(common), as_notice(tail1), as_notice(tail2) ) )
                # Collate only reports whose directory is related to this
                # destination (one path is a prefix of the other).
                if common and (not tail1 or not tail2):

                    for json_report in json_reports:

                        summary = CollateReportIndexAction._read( str(json_report) )

                        toolchain_variant = summary['toolchain_variant_dir']

                        cls._update_toolchain_variant_summary( summaries, toolchain_variant, summary )

                        summary_name = summary['name']

                        if not summary_name in summaries['reports']:
                            summaries['reports'][summary_name] = {}
                            summaries['reports'][summary_name]['variants'] = {}

                        summaries['reports'][summary_name]['variants'][toolchain_variant] = summary

            # sorted() replaces the Python-2-only items()+list.sort() pair;
            # report names are unique dict keys so tuple comparison never
            # reaches the (unorderable in Py3) dict values.
            report_list = sorted( summaries['reports'].items() )

            for name, report in report_list:
                report['default_variant'] = None
                report['default_summary_rel_path'] = None
                variant_count = 0
                status_rank = 0
                for variant in report['variants'].values():
                    variant_count += 1
                    # Overall status is the worst (highest-ranked) status
                    # seen across the report's variants.
                    index = cls._ranked_status().index(variant['status'])
                    if index > status_rank:
                        status_rank = index
                    # First variant encountered becomes the default link
                    # target for the report.
                    if not report['default_variant']:
                        report['default_variant'] = variant['toolchain_variant_dir']
                        report['default_summary_rel_path'] = variant['summary_rel_path']

                report['variant_count'] = variant_count
                report['status'] = cls._ranked_status()[status_rank]
                report['selector'] = GenerateHtmlReportBuilder._selector_from_name( name )
                report['style'] = GenerateHtmlReportBuilder._status_bootstrap_style( report['status'] )
                report['text_colour'] = GenerateHtmlReportBuilder._status_bootstrap_text_colour( report['status'] )

            summaries_json_report = json.dumps(
                summaries,
                sort_keys = True,
                indent = 4,
                separators = (',', ': ')
            )

            logger.trace( "summaries = \n{}".format( summaries_json_report ) )

            with open( master_report_path, 'w' ) as master_report_file:
                master_report_file.write( summaries_json_report )

            with open( master_index_path, 'w' ) as master_index_file:
                # .encode('utf-8') matches this file's Python-2 convention of
                # writing encoded byte strings; kept as-is for compatibility.
                master_index_file.write(
                    template.render(
                        summaries=summaries,
                        report_list=report_list,
                        next=next,
                        len=len
                    ).encode('utf-8')
                )