Example No. 1
    def __init__( self, env, stage_dir, libraries, add_dependents, linktype, boost ):
        self._env = env

        sconstruct_id = env['sconstruct_path']
        global _prebuilt_boost_libraries
        if sconstruct_id not in _prebuilt_boost_libraries['emitter']:
            _prebuilt_boost_libraries['emitter'][sconstruct_id] = {}

        logger.trace( "Current Boost build [{}] has the following build variants [{}]".format( as_info(sconstruct_id), colour_items(_prebuilt_boost_libraries['emitter'][sconstruct_id].keys()) ) )

        self._stage_dir    = stage_dir

        logger.debug( "Requested libraries [{}]".format( colour_items( libraries ) ) )

        self._libraries    = _lazy_update_library_list( env, True, libraries, _prebuilt_boost_libraries['emitter'][sconstruct_id], add_dependents, linktype, boost, self._stage_dir )

        logger.debug( "Required libraries [{}]".format( colour_items( self._libraries ) ) )

        self._location     = boost.local()
        self._boost        = boost
        self._threading    = True

        self._linktype     = linktype
        self._variant      = variant_name( self._env['variant'].name() )
        self._toolchain    = env['toolchain']
Example No. 2
    def build_library_from_source( self, env, sources=None, library_name=None, linktype=None ):

        from SCons.Script import Flatten

        if not self._source_path and not sources:
            logger.warn( "Attempting to build library when source path is None" )
            return None

        if not library_name:
            library_name = self._name

        if not linktype:
            linktype = self._linktype

        variant_key = env['tool_variant_dir']

        prebuilt_objects   = self.lazy_create_node( variant_key, self._prebuilt_objects )
        prebuilt_libraries = self.lazy_create_node( variant_key, self._prebuilt_libraries )

        local_dir = self._location.local()
        local_folder = self._location.local_folder()

        build_dir = os.path.abspath( os.path.join( env['abs_build_root'], local_folder, env['tool_variant_working_dir'] ) )
        final_dir = os.path.abspath( os.path.normpath( os.path.join( build_dir, env['final_dir'] ) ) )

        logger.debug( "build_dir for [{}] = [{}]".format( as_info(self._name), build_dir ) )
        logger.debug( "final_dir for [{}] = [{}]".format( as_info(self._name), final_dir ) )

        obj_suffix = env['OBJSUFFIX']
        obj_builder = env.StaticObject
        lib_builder = env.BuildStaticLib

        if linktype == "shared":
            obj_suffix = env['SHOBJSUFFIX']
            obj_builder = env.SharedObject
            lib_builder = env.BuildSharedLib

        if not sources:
            sources = env.RecursiveGlob( "*.cpp", start=self._source_path, exclude_dirs=[ env['build_dir'] ] )
            sources.extend( env.RecursiveGlob( "*.cc", start=self._source_path, exclude_dirs=[ env['build_dir'] ] ) )
            sources.extend( env.RecursiveGlob( "*.c", start=self._source_path, exclude_dirs=[ env['build_dir'] ] ) )

        objects = []
        for source in Flatten( [sources] ):
            rel_path = os.path.relpath( str(source), local_dir )
            rel_obj_path = os.path.splitext( rel_path )[0] + obj_suffix
            obj_path = os.path.join( build_dir, rel_obj_path )
            if rel_obj_path not in prebuilt_objects:
                prebuilt_objects[rel_obj_path] = obj_builder( obj_path, source )
            objects.append( prebuilt_objects[rel_obj_path] )

        if linktype not in prebuilt_libraries:
            library = lib_builder( library_name, objects, final_dir = final_dir )
            if linktype == "shared":
                library = env.Install( env['abs_final_dir'], library )
            prebuilt_libraries[linktype] = library
        else:
            logger.trace( "using existing library = [{}]".format( str(prebuilt_libraries[linktype]) ) )

        return prebuilt_libraries[linktype]
Example No. 3
    def __init__( self, env, stage_dir, libraries, add_dependents, linktype, boost, verbose_build, verbose_config ):

        self._env = env

        sconstruct_id = env['sconstruct_path']
        global _prebuilt_boost_libraries
        if sconstruct_id not in _prebuilt_boost_libraries['action']:
            _prebuilt_boost_libraries['action'][sconstruct_id] = {}

        logger.trace( "Current Boost build [{}] has the following build variants [{}]".format( as_info(sconstruct_id), colour_items(_prebuilt_boost_libraries['action'][sconstruct_id].keys()) ) )

        logger.debug( "Requested libraries [{}]".format( colour_items( libraries ) ) )

        self._linktype       = linktype
        self._variant        = variant_name( self._env['variant'].name() )
        self._target_arch    = env['target_arch']
        self._toolchain      = env['toolchain']
        self._stage_dir      = stage_dir

        self._libraries = _lazy_update_library_list( env, False, libraries, _prebuilt_boost_libraries['action'][sconstruct_id], add_dependents, linktype, boost, self._stage_dir )

        logger.debug( "Required libraries [{}]".format( colour_items( self._libraries ) ) )

        self._location       = boost.local()
        self._verbose_build  = verbose_build
        self._verbose_config = verbose_config
        self._job_count      = env['job_count']
        self._parallel       = env['parallel']
        self._threading      = True
Example No. 4
    def __init__(
                self,
                env,
                include_thirdparty,
                exclude_branches,
                excluded_paths_starting,
                place_cbs_by_sconscript,
                exclude_cc_search_paths,
                exclude_cc_sys_search_paths
        ):

        self._include_thirdparty = include_thirdparty
        self._exclude_branches = exclude_branches
        self._excluded_paths_starting = excluded_paths_starting and excluded_paths_starting or []
        self._place_cbs_by_sconscript = place_cbs_by_sconscript
        self._exclude_cc_search_paths = exclude_cc_search_paths
        self._exclude_cc_sys_search_paths = exclude_cc_sys_search_paths

        self._projects = {}

        base_include = self._exclude_branches and env['base_path'] or env['branch_root']

        base = os.path.realpath( base_include )
        download = os.path.realpath( env['download_root'] )

        thirdparty = env['thirdparty'] and os.path.realpath( env['thirdparty'] ) or None

        common, tail1, tail2 = cuppa.path.split_common( base, download )
        download_under_base = common and not tail1

        thirdparty_under_base = None
        if thirdparty:
            common, tail1, tail2 = cuppa.path.split_common( base, thirdparty )
            thirdparty_under_base = common and not tail1

        self._exclude_paths = self._excluded_paths_starting
        self._build_root = [ env['build_root'] ]

        if not self._include_thirdparty:
            if download_under_base:
                self._exclude_paths.append( env['download_root'] )

            if thirdparty and thirdparty_under_base:
                self._exclude_paths.append( env['thirdparty'] )

        self._include_paths = [ base_include ]

        if self._include_thirdparty:
            if not download_under_base:
                self._include_paths.append( env['download_root'] )

            if thirdparty and not thirdparty_under_base:
                self._include_paths.append( env['thirdparty'] )

        self._ignored_types = ignored_types( env )

        cuppa.progress.NotifyProgress.register_callback( None, self.on_progress )

        logger.debug( "Including Paths Under    = {}".format( as_notice( str( self._include_paths ) ) ) )
        logger.debug( "Excluding Paths Starting = {}".format( as_notice( str( self._exclude_paths ) ) ) )
Example No. 5
    def location_id( cls, env ):
        location = env.get_option( cls.location_option() )
        develop  = env.get_option( cls.develop_option() )
        branch   = env.get_option( cls.branch_option() )

        use_develop = env.get_option( "develop" )

        if not location and cls._default_location:
            location = cls._default_location
        if not location and branch:
            location = env['branch_root']
        if not location and env['thirdparty']:
            location = env['thirdparty']
        if not location:
            logger.debug( "No location specified for dependency [{}]. Dependency not available.".format( cls._name.title() ) )
            return None

        if location:
            location = os.path.expanduser( location )

        if not develop and cls._default_develop:
            develop = cls._default_develop

        if develop:
            develop = os.path.expanduser( develop )

        return (location, develop, branch, use_develop)
Example No. 6
def _location_from_boost_version( location, offline ):
    if location == "latest" or location == "current":
        location = _determine_latest_boost_verion( offline )
    if location:
        match = re.match( r'(boost_)?(?P<version>\d[._]\d\d(?P<minor>[._]\d)?)(?:[_\-.]rc(?P<release_candidate>\d))?', location )
        if match:
            logger.debug( "Only boost version specified, retrieve from SourceForge if not already cached" )

            extension = ".tar.gz"
            if cuppa.build_platform.name() == "Windows":
                extension = ".zip"

            boost_version = match.group('version')
            if not match.group('minor'):
                boost_version += "_0"
            numeric_version = boost_version.translate( maketrans( '._', '..' ) )

            string_version = boost_version.translate( maketrans( '._', '__' ) )
            if match.group('release_candidate'):
                string_version += "_rc{}".format( match.group('release_candidate') )

            # From 1.71 onwards source files use bintray.com as the primary upload location.
            if packaging_version.parse(numeric_version) > packaging_version.parse("1.70"):
                return "https://dl.bintray.com/boostorg/release/{numeric_version}/source/boost_{string_version}{extension}".format(
                            numeric_version = numeric_version,
                            string_version = string_version,
                            extension = extension
                        )
            else:
                return "http://sourceforge.net/projects/boost/files/boost/{numeric_version}/boost_{string_version}{extension}/download".format(
                            numeric_version = numeric_version,
                            string_version = string_version,
                            extension = extension
                        )
    return location
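
A note on the mapping above: the regular expression captures the major and minor digits, and the two translate() calls produce the dotted form used in the URL path and the underscored form used in the archive name. The following standalone sketch is illustrative only (it is not part of cuppa and assumes Python 3, where str.maketrans stands in for string.maketrans); the input "1.71" is a hypothetical value.

import re

location = "1.71"
match = re.match( r'(boost_)?(?P<version>\d[._]\d\d(?P<minor>[._]\d)?)', location )
boost_version = match.group('version')
if not match.group('minor'):
    boost_version += "_0"                                                    # "1.71_0"

numeric_version = boost_version.translate( str.maketrans( '._', '..' ) )    # "1.71.0"
string_version  = boost_version.translate( str.maketrans( '._', '__' ) )    # "1_71_0"

print( numeric_version, string_version )
# these feed the ".../release/1.71.0/source/boost_1_71_0.tar.gz" style URL
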
Example No. 7
    def location_id(cls, env):
        location = env.get_option(cls.location_option())
        develop = env.get_option(cls.develop_option())
        branch = env.get_option(cls.branch_option())

        use_develop = env.get_option("develop")

        if not location and cls._default_location:
            location = cls._default_location
        if not location and branch:
            location = env['branch_root']
        if not location and env['thirdparty']:
            location = env['thirdparty']
        if not location:
            logger.debug(
                "No location specified for dependency [{}]. Dependency not available."
                .format(cls._name.title()))
            return None

        if location:
            location = os.path.expanduser(location)

        if not develop and cls._default_develop:
            develop = cls._default_develop

        if develop:
            develop = os.path.expanduser(develop)

        return (location, develop, branch, use_develop)
Example No. 8
def _location_from_boost_version(location, offline):
    if location == "latest" or location == "current":
        location = _determine_latest_boost_verion(offline)
    if location:
        match = re.match(r'(boost_)?(?P<version>\d[._]\d\d(?P<minor>[._]\d)?)',
                         location)
        if match:
            version = match.group('version')
            if not match.group('minor'):
                version += "_0"
            logger.debug(
                "Only boost version specified, retrieve from SourceForge if not already cached"
            )
            extension = ".tar.gz"
            if cuppa.build_platform.name() == "Windows":
                extension = ".zip"

            # Boost 1.71.0 source files are missing from the sourceforge repository.
            if "1.71" in version:
                return "https://dl.bintray.com/boostorg/release/{numeric_version}/source/boost_{string_version}{extension}".format(
                    numeric_version=version.translate(maketrans('._', '..')),
                    string_version=version.translate(maketrans('._', '__')),
                    extension=extension)
            return "http://sourceforge.net/projects/boost/files/boost/{numeric_version}/boost_{string_version}{extension}/download".format(
                numeric_version=version.translate(maketrans('._', '..')),
                string_version=version.translate(maketrans('._', '__')),
                extension=extension)
    return location
Example No. 9
 def get_cached_archive( self, cache_root, path ):
     logger.debug( "Checking for cached archive [{}]...".format( as_info( path ) ) )
     for archive in os.listdir(cache_root):
         if fnmatch.fnmatch( archive, path ):
             logger.debug( "Found cached archive [{}] skipping download".format( as_info( archive ) ) )
             return os.path.join( cache_root, archive )
     return None
Example No. 10
    def get_active_actions( self, cuppa_env, current_variant, active_variants, active_actions ):
        available_variants = cuppa_env[ self.variants_key ]
        available_actions  = cuppa_env[ self.actions_key ]
        specified_actions  = {}

        for key, action in available_actions.items():
            if cuppa_env.get_option( action.name() ) or action.name() in active_actions:
                specified_actions[ action.name() ] = action

        if not specified_actions:
            if active_variants:
                for variant_name in active_variants:
                    if variant_name in available_actions:
                        specified_actions[ variant_name ] = available_actions[ variant_name ]

        active_actions = {}

        for key, action in specified_actions.items():
            if key not in available_variants:
                active_actions[ key ] = action
            elif key == current_variant.name():
                active_actions[ key ] = action

        logger.debug( "Specifying active_actions of [{}] for variant [{}]".format( colour_items( specified_actions, as_info ), current_variant.name() ) )

        return active_actions
Example No. 12
    def __init__( self, cuppa_env, location, branch=None, extra_sub_path=None, name_hint=None ):

        self._location   = location
        self._full_url   = urlparse.urlparse( location )
        self._sub_dir    = ""
        self._name_hint  = name_hint

        if extra_sub_path:
            if os.path.isabs( extra_sub_path ):
                raise LocationException( "Error extra sub path [{}] is not relative".format(extra_sub_path) )
            else:
                self._sub_dir = os.path.normpath( extra_sub_path )

        ## Get the location for the source dependency. If the location is a URL or an Archive we'll need to
        ## retrieve the URL and extract the archive. get_local_directory() returns the location of the source
        ## once this is done
        local_directory, use_sub_dir = self.get_local_directory( cuppa_env, location, self._sub_dir, branch, self._full_url )

        self._base_local_directory = local_directory
        self._local_directory = use_sub_dir and os.path.join( local_directory, self._sub_dir ) or local_directory

        ## Now that we have a locally accessible version of the dependency we can try to collate some information
        ## about it to allow us to specify what we are building with.
        self._url, self._repository, self._branch, self._revision = self.get_info( self._location, self._local_directory, self._full_url )
        self._version, self._revision = self.ver_rev_summary( self._branch, self._revision, self._full_url.path )

        logger.debug( "Using [{}]{} at [{}] stored in [{}]".format(
                as_info( location ),
                ( branch and  ":[{}]".format( as_info(  str(branch) ) ) or "" ),
                as_info( self._version ),
                as_notice( self._local_directory )
        ) )
Example No. 13
    def get_option( cls, option, default=None ):
        if option in cls._cached_options:
            return cls._cached_options[ option ]

        value = SCons.Script.GetOption( option )
        source = None
        if value is None or value == '':
            if cls._options['default_options'] and option in cls._options['default_options']:
                value = cls._options['default_options'][ option ]
                source = "in the sconstruct file"
            elif default:
                value = default
                source = "using default"
        else:
            source = "on command-line"

        if option in cls._options['configured_options']:
            source = "using configure"

        if value:
            logger.debug( "option [{}] set {} as [{}]".format(
                        as_info( option ),
                        source,
                        as_info( str(value) ) )
            )
        cls._cached_options[option] = value
        return value
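
The resolution order implemented above is: a non-empty command-line value wins, otherwise a 'default_options' entry from the sconstruct file, otherwise the 'default' argument; membership in 'configured_options' only changes the reported source. A minimal sketch of that precedence follows (hypothetical helper, not cuppa code).

def resolve_option( cli_value, sconstruct_defaults, option, default=None ):
    # None and the empty string both mean "not given on the command line"
    if cli_value not in ( None, '' ):
        return cli_value, "on command-line"
    if option in sconstruct_defaults:
        return sconstruct_defaults[ option ], "in the sconstruct file"
    return default, "using default"

print( resolve_option( '', { "boost-version": "1.71" }, "boost-version" ) )
# ('1.71', 'in the sconstruct file')
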
Example No. 14
def apply_patch_if_needed(home, version_string):

    patch_applied_path = os.path.join(home, "cuppa_test_patch_applied.txt")

    expected_diff_file = os.path.join(
        os.path.split(__file__)[0],
        "boost_test_patch_{}.diff".format(version_string))

    available_diff_files = sorted(glob.glob(
        os.path.join(os.path.split(__file__)[0], "boost_test_patch_*.diff")),
                                  reverse=True)

    for diff_file in available_diff_files:
        if diff_file <= expected_diff_file:
            break

    logger.debug("Using diff file [{}]".format(as_info(diff_file)))

    if os.path.exists(patch_applied_path):
        logger.debug("[{}] already applied".format(as_info(diff_file)))
        return

    command = "patch --batch -p1 --input={}".format(diff_file)

    logger.info("Applying [{}] using [{}] in [{}]".format(
        as_info(diff_file), as_info(command), as_info(home)))

    if subprocess.call(shlex.split(command), cwd=home) != 0:
        logger.error("Could not apply [{}]".format(diff_file))
    else:
        with open(patch_applied_path, "w") as patch_applied_file:
            pass
Example No. 15
    def get_active_actions(self, cuppa_env, current_variant, active_variants,
                           active_actions):
        available_variants = cuppa_env[self.variants_key]
        available_actions = cuppa_env[self.actions_key]
        specified_actions = {}

        for key, action in available_actions.items():
            if cuppa_env.get_option(
                    action.name()) or action.name() in active_actions:
                specified_actions[action.name()] = action

        if not specified_actions:
            if active_variants:
                for variant_name in active_variants:
                    if variant_name in available_actions.keys():
                        specified_actions[variant_name] = available_actions[
                            variant_name]

        active_actions = {}

        for key, action in specified_actions.items():
            if key not in available_variants:
                active_actions[key] = action
            elif key == current_variant.name():
                active_actions[key] = action

        logger.debug(
            "Specifying active_actions of [{}] for variant [{}]".format(
                colour_items(specified_actions, as_info),
                current_variant.name()))

        return active_actions
Example No. 16
 def name_from_dir( path ):
     if not os.path.isabs( path ):
         path = os.path.normpath( os.path.join( self._cuppa_env['sconstruct_dir'], path ) )
         logger.debug( "normalised path = [{}]".format( path ) )
     common, tail1, tail2 = split_common( self._cuppa_env['abs_sconscript_dir'], os.path.abspath( path ) )
     logger.debug( "common[{}], tail1[{}], tail2[{}]".format( as_notice( common ), as_notice( tail1 ), as_notice( tail2 ) ) )
     return tail2 and tail2 or ""
Example No. 17
    def obtain_from_repository( self, location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend ):
        rev_options = self.get_rev_options( vc_type, vcs_backend )
        action = "Cloning"
        if vc_type == "svn":
            action = "Checking out"
        max_attempts = 2
        attempt = 1
        while attempt <= max_attempts:
            logger.info( "{} [{}] into [{}]{}".format(
                    action,
                    as_info( location ),
                    as_info( local_dir_with_sub_dir ),
                    attempt > 1 and "(attempt {})".format( str(attempt) ) or ""
            ) )
            try:
                obtain( vcs_backend, local_dir_with_sub_dir, vcs_backend.url )
                logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                break
            except pip_exceptions.PipError as error:
                attempt = attempt + 1
                log_as = logger.warn
                if attempt > max_attempts:
                    log_as = logger.error

                log_as( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                        as_info( location ),
                        as_notice( local_dir_with_sub_dir ),
                        ( rev_options and  " to {}".format( as_notice(  str(rev_options) ) ) or ""),
                        as_error( str(error) )
                ) )
                if attempt > max_attempts:
                    raise LocationException( str(error) )
Example No. 18
    def get_local_directory( self, location, sub_dir, branch_path, full_url ):

        logger.debug( "Determine local directory for [{location}] when {offline}".format(
                location=as_info(location),
                offline= self._offline and as_info_label("OFFLINE") or "online"
        ) )

        local_directory = None

        base = self._cuppa_env['download_root']
        if not os.path.isabs( base ):
            base = os.path.join( self._cuppa_env['working_dir'], base )

        if location.startswith( 'file:' ):
            location = pip_download.url_to_path( location )

        if not pip_is_url( location ):
            return self.get_local_directory_for_non_url( location, sub_dir, branch_path, base )

        else:
            self._local_folder = self.folder_name_from_path( full_url )
            local_directory = os.path.join( base, self._local_folder )

            if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
                return self.get_local_directory_for_download_url( location, sub_dir, local_directory )

            elif '+' in full_url.scheme:
                return self.get_local_directory_for_repository( location, sub_dir, full_url, local_directory )

            return local_directory
Example No. 19
 def name_from_dir( path ):
     if not os.path.isabs( path ):
         path = os.path.normpath( os.path.join( cuppa_env['sconstruct_dir'], path ) )
         logger.debug( "normalised path = [{}]".format( path ) )
     common, tail1, tail2 = split_common( cuppa_env['abs_sconscript_dir'], os.path.abspath( path ) )
     logger.debug( "common[{}], tail1[{}], tail2[{}]".format( as_notice( common ), as_notice( tail1 ), as_notice( tail2 ) ) )
     return tail2 and tail2 or ""
Example No. 20
 def _get_qt4_version( self ):
     command = "pkg-config --modversion QtCore"
     try:
         return subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip()
     except:
         logger.debug( "In _get_qt4_version() failed to execute [{}]".format( command ) )
     return None
Example No. 21
    def write( self, project ):

        project_file = self._projects[project]['project_file']
        directory    = self._projects[project]['directory']

        logger.debug( "Write [{}] for [{}]".format(
                as_notice( self._projects[project]['project_file'] ),
                as_notice( project )
        ) )

        if directory and not os.path.exists( directory ):
            os.makedirs( directory )

        lines = []
        lines += self._projects[project]['lines_header']

        for target in sorted( self._projects[project]['targets'].itervalues() ):
            lines += target

        lines += [ '\t\t</Build>' ]
        for filepath in sorted( self._projects[project]['files'] ):
            lines += [ '\t\t<Unit filename="' + filepath + '" />' ]

        lines += self._projects[project]['lines_footer']

        with open( project_file, "w" ) as cbs_file:
            cbs_file.write( "\n".join( lines ) )
Example No. 22
    def write( self, project ):

        project_file = self._projects[project]['project_file']
        directory    = self._projects[project]['directory']

        logger.debug( "Write [{}] for [{}]".format(
                as_notice( self._projects[project]['project_file'] ),
                as_notice( project )
        ) )

        if directory and not os.path.exists( directory ):
            os.makedirs( directory )

        lines = []
        lines += self._projects[project]['lines_header']

        for target in sorted( six.itervalues(self._projects[project]['targets']) ):
            lines += target

        lines += [ '\t\t</Build>' ]
        for filepath in sorted( self._projects[project]['files'] ):
            lines += [ '\t\t<Unit filename="' + filepath + '" />' ]

        lines += self._projects[project]['extensions_block']

        lines += self._projects[project]['lines_footer']

        with open( project_file, "w" ) as cbs_file:
            cbs_file.write( "\n".join( lines ) )
Example No. 23
    def _get_boost_location( cls, env, location, version, base, patched ):
        logger.debug( "Identify boost using location = [{}], version = [{}], base = [{}], patched = [{}]".format(
                as_info( str(location) ),
                as_info( str(version) ),
                as_info( str(base) ),
                as_info( str(patched) )
        ) )

        boost_home = None
        boost_location = None

        extra_sub_path = 'clean'
        if patched:
            extra_sub_path = 'patched'

        if location:
            location = cls.location_from_boost_version( location )
            if not location: # use version as a fallback in case both are specified
                location = cls.location_from_boost_version( version )
            boost_location = cuppa.location.Location( env, location, extra_sub_path=extra_sub_path, name_hint="boost" )

        elif base: # Find boost locally
            if not os.path.isabs( base ):
                base = os.path.abspath( base )

            if not version:
                boost_home = base
            elif version:
                search_list = [
                    os.path.join( base, 'boost', version, 'source' ),
                    os.path.join( base, 'boost', 'boost_' + version ),
                    os.path.join( base, 'boost', version ),
                    os.path.join( base, 'boost_' + version ),
                ]

                def exists_in( locations ):
                    for location in locations:
                        home = cls._home_from_path( location )
                        if home:
                            return home
                    return None

                boost_home = exists_in( search_list )
                if not boost_home:
                    raise BoostException("Cannot construct Boost Object. Home for Version [{}] cannot be found. Searched in [{}]".format(version, str([l for l in search_list])))
            else:
                raise BoostException("Cannot construct Boost Object. No Home or Version specified")

            logger.debug( "Using boost found at [{}]".format( as_info( boost_home ) ) )
            boost_location = cuppa.location.Location( env, boost_home, extra_sub_path=extra_sub_path )
        else:
            location = cls.location_from_boost_version( version )
            boost_location = cuppa.location.Location( env, location, extra_sub_path=extra_sub_path )

        if patched:
            cls.apply_patch_if_needed( boost_location.local() )

        return boost_location
Example No. 24
    def get_rev_options(self, vc_type, vcs_backend, local_remote=None):
        url, rev = get_url_rev(vcs_backend)

        logger.debug(
            "make_rev_options for [{}] at url [{}] with rev [{}]/[{}]".format(
                as_info(vc_type), as_notice(str(url)), as_notice(str(rev)),
                as_notice(str(local_remote))))

        return make_rev_options(vc_type, vcs_backend, url, rev, local_remote)
Example No. 25
    def get_local_directory_for_download_url(self, location, sub_dir,
                                             local_directory):

        logger.debug("[{}] is an archive download".format(as_info(location)))

        local_dir_with_sub_dir = os.path.join(local_directory,
                                              sub_dir and sub_dir or "")

        # First we check to see if we already downloaded and extracted this archive before
        if os.path.exists(local_dir_with_sub_dir):
            try:
                # If not empty this will fail
                os.rmdir(local_dir_with_sub_dir)
            except:
                # Not empty so we'll return this as the local_directory

                logger.debug("(already present) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(already present) Local folder = [{}]".format(
                    as_info(str(self._local_folder))))

                return local_directory

        if self._cuppa_env['dump'] or self._cuppa_env['clean']:
            return local_directory

        # If not we then check to see if we cached the download
        cached_archive = self.get_cached_archive(self._cuppa_env['cache_root'],
                                                 self._local_folder)
        if cached_archive:
            logger.debug("Cached archive [{}] found for [{}]".format(
                as_info(cached_archive), as_info(location)))
            self.extract(cached_archive, local_dir_with_sub_dir)
        else:
            logger.info("Downloading [{}]...".format(as_info(location)))
            try:
                report_hook = None
                if logger.isEnabledFor(logging.INFO):
                    report_hook = ReportDownloadProgress()
                filename, headers = urlretrieve(location,
                                                reporthook=report_hook)
                name, extension = os.path.splitext(filename)
                logger.info("[{}] successfully downloaded to [{}]".format(
                    as_info(location), as_info(filename)))
                self.extract(filename, local_dir_with_sub_dir)
                if self._cuppa_env['cache_root']:
                    cached_archive = os.path.join(
                        self._cuppa_env['cache_root'], self._local_folder)
                    logger.debug("Caching downloaded file as [{}]".format(
                        as_info(cached_archive)))
                    shutil.copyfile(filename, cached_archive)
            except ContentTooShortError as error:
                logger.error("Download of [{}] failed with error [{}]".format(
                    as_error(location), as_error(str(error))))
                raise LocationException(error)

        return local_directory
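
The os.rmdir() call above doubles as an emptiness check: it only succeeds on an empty directory, so the exception path means a previous extraction is already in place and can be reused. A minimal, self-contained illustration of that idiom (not cuppa code) is shown below.

import os
import tempfile

path = tempfile.mkdtemp()
open( os.path.join( path, "marker" ), "w" ).close()

try:
    os.rmdir( path )             # only succeeds if the directory is empty
    already_extracted = False
except OSError:
    already_extracted = True     # non-empty, so reuse the existing extraction

print( already_extracted )       # True
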
Example No. 26
    def add_to_env( cls, env, add_toolchain, add_to_supported ):
        for version in cls.supported_versions():
            add_to_supported( version )

        for version, gcc in cls.available_versions().iteritems():
            logger.debug(
                "Adding toolchain [{}] reported as [{}] with cxx_version [g++{}] at [{}]"
                .format( as_info(version), as_info(gcc['version']), as_info(gcc['cxx_version']), as_notice(gcc['path']) )
            )
            add_toolchain( version, cls( version, gcc['cxx_version'], gcc['version'], gcc['path'] ) )
Example No. 28
    def _write(self,
               destination_path,
               env,
               test_suites,
               sort_test_cases=False):

        logger.debug("Write HTML report for {}".format(destination_path))

        name = self._summary_name(env, destination_path)
        tests_title = name

        test_summary = self._create_test_summary(name)
        test_summary['toolchain_variant_dir'] = env['tool_variant_dir']
        test_summary['summary_rel_path'] = os.path.join(
            destination_subdir(env),
            os.path.split(destination_path)[1])

        test_suite_list = sorted(test_suites.values(),
                                 key=lambda test_suite: test_suite["name"])

        for test_suite in test_suite_list:

            self._add_render_fields(test_suite)
            if sort_test_cases:
                test_suite['test_cases'] = sorted(
                    test_suite['test_cases'], key=lambda test: test["name"])

            for test_case in test_suite['test_cases']:
                self._add_render_fields(test_case)
                if test_case['stdout']:
                    escaped_stdout = (escape(line).rstrip()
                                      for line in test_case['stdout'])
                    test_case['stdout'] = escaped_stdout
                test_case['uri'] = self._create_uri(test_case)

            self._update_summary_stats(test_summary, test_suite, "test_suite")

        self._add_render_fields(test_summary)

        summary_path = self._summary_path(destination_path)
        with open(summary_path, 'w') as summary_file:
            json.dump(test_summary,
                      summary_file,
                      sort_keys=True,
                      indent=4,
                      separators=(',', ': '))

        template = self.get_template()

        templateRendered = template.render(tests_title=tests_title,
                                           test_summary=test_summary,
                                           test_suites=test_suite_list)

        with open(destination_path, 'w') as test_suite_index:
            test_suite_index.write(encode(templateRendered))
Example No. 29
    def get_rev_options( self, vc_type, vcs_backend, local_remote=None ):
        url, rev = get_url_rev( vcs_backend )

        logger.debug( "make_rev_options for [{}] at url [{}] with rev [{}]/[{}]".format(
            as_info( vc_type ),
            as_notice( str(url) ),
            as_notice( str(rev) ),
            as_notice( str(local_remote) )
        ) )

        return make_rev_options( vc_type, vcs_backend, url, rev, local_remote )
Example No. 30
    def on_sconstruct_end( self, env ):
        workspace_dir = os.path.join( env['working_dir'], "cbs" )
        workspace_path = os.path.join( workspace_dir, "all.workspace" )

        if workspace_dir and not os.path.exists( workspace_dir ):
            os.makedirs( workspace_dir )

        logger.debug( "Write workspace [{}]".format( as_notice( workspace_path ) ) )

        with open( workspace_path, "w" ) as workspace_file:
            workspace_file.write( "\n".join( self.create_workspace( self._projects ) ) )
Example No. 32
    def location_id( cls, env ):
        location = env.get_option( cls._name + "-location" )
        branch   = env.get_option( cls._name + "-branch" )

        if not location and branch:
            location = env['branch_root']
        if not location and branch:
            location = env['thirdparty']
        if not location:
            logger.debug( "No location specified for dependency [{}]. Dependency not available.".format( cls._name.title() ) )
            return None

        return (location, branch)
Example No. 33
    def _write( self, destination_path, env, test_suites, sort_test_cases=False ):

        logger.debug( "Write HTML report for {}".format( destination_path ) )

        name = self._summary_name( env, destination_path )
        tests_title = name

        test_summary = self._create_test_summary( name )
        test_summary['toolchain_variant_dir'] = env['tool_variant_dir']
        test_summary['summary_rel_path'] = os.path.join( destination_subdir( env ), os.path.split( destination_path )[1] )

        test_suite_list = sorted( test_suites.values(), key=lambda test_suite: test_suite["name"] )

        for test_suite in test_suite_list:

            self._add_render_fields( test_suite )
            if sort_test_cases:
                test_suite['test_cases'] = sorted( test_suite['test_cases'], key=lambda test: test["name"] )

            for test_case in test_suite['test_cases']:
                self._add_render_fields( test_case )
                if test_case['stdout']:
                    escaped_stdout = ( cgi.escape(line).rstrip() for line in test_case['stdout'] )
                    test_case['stdout'] = escaped_stdout
                test_case['uri'] = self._create_uri( test_case )

            self._update_summary_stats( test_summary, test_suite, "test_suite" )

        self._add_render_fields( test_summary )

        summary_path = self._summary_path( destination_path )
        with open( summary_path, 'w' ) as summary_file:
            json.dump(
                test_summary,
                summary_file,
                sort_keys = True,
                indent = 4,
                separators = (',', ': ')
            )

        template = self.get_template()

        with open( destination_path, 'w' ) as test_suite_index:
            test_suite_index.write(
                template.render(
                    tests_title = tests_title,
                    test_summary = test_summary,
                    test_suites = test_suite_list,
                ).encode('utf-8')
            )
Example No. 34
 def _set_qt5_dir(self, env):
     command = "pkg-config --cflags Qt5Core"
     try:
         cflags = subprocess.check_output(shlex.split(command), stderr=subprocess.STDOUT).strip()
         if cflags:
             flags = env.ParseFlags(cflags)
             if "CPPPATH" in flags:
                 shortest_path = flags["CPPPATH"][0]
                 for include in flags["CPPPATH"]:
                     if len(include) < len(shortest_path):
                         shortest_path = include
                 env["QT5DIR"] = shortest_path
     except:
         logger.debug("In _set_qt5_dir() failed to execute [{}]".format(command))
Example No. 35
def determine_latest_boost_verion():
    current_release = "1.61.0"
    try:
        html = lxml.html.parse('http://www.boost.org/users/download/')

        current_release = html.xpath("/html/body/div[2]/div/div[1]/div/div/div[2]/h3[1]/span")[0].text
        current_release = str( re.search( r'(\d[.]\d+([.]\d+)?)', current_release ).group(1) )

        logger.debug( "latest boost release detected as [{}]".format( as_info( current_release ) ) )

    except Exception as e:
        logger.warn( "cannot determine latest version of boost - [{}]. Assuming [{}].".format( str(e), current_release ) )

    return current_release
Example No. 36
    def get_local_directory_for_non_url( self, location, sub_dir, branch_path, base ):

        if pip_is_archive_file( location ):

            self._local_folder = self.folder_name_from_path( location )
            local_directory = os.path.join( base, self._local_folder )

            local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

            if os.path.exists( local_dir_with_sub_dir ):
                try:
                    os.rmdir( local_dir_with_sub_dir )
                except:
                    return local_directory

            self.extract( location, local_dir_with_sub_dir )
            logger.debug( "(local archive) Location = [{}]".format( as_info( location ) ) )
            logger.debug( "(local archive) Local folder = [{}]".format( as_info( self._local_folder ) ) )

        else:
            local_directory = branch_path and os.path.join( location, branch_path ) or location
            self._local_folder = self.folder_name_from_path( location )

            logger.debug( "(local file) Location = [{}]".format( as_info( location ) ) )
            logger.debug( "(local file) Local folder = [{}]".format( as_info( self._local_folder ) ) )

        return local_directory
Example No. 37
    def __init__( self, env, include_thirdparty, exclude_branches, excluded_paths_starting, place_cbs_by_sconscript ):

        self._include_thirdparty = include_thirdparty
        self._exclude_branches = exclude_branches
        self._excluded_paths_starting = excluded_paths_starting and excluded_paths_starting or []
        self._place_cbs_by_sconscript = place_cbs_by_sconscript

        self._projects = {}

        base_include = self._exclude_branches and env['base_path'] or env['branch_root']

        base = os.path.realpath( base_include )
        download = os.path.realpath( env['download_root'] )

        thirdparty = env['thirdparty'] and os.path.realpath( env['thirdparty'] ) or None

        common, tail1, tail2 = cuppa.path.split_common( base, download )
        download_under_base = common and not tail1

        thirdparty_under_base = None
        if thirdparty:
            common, tail1, tail2 = cuppa.path.split_common( base, thirdparty )
            thirdparty_under_base = common and not tail1

        self._exclude_paths = self._excluded_paths_starting
        self._build_root = [ env['build_root'] ]

        if not self._include_thirdparty:
            if download_under_base:
                self._exclude_paths.append( env['download_root'] )

            if thirdparty and thirdparty_under_base:
                self._exclude_paths.append( env['thirdparty'] )

        self._include_paths = [ base_include ]

        if self._include_thirdparty:
            if not download_under_base:
                self._include_paths.append( env['download_root'] )

            if thirdparty and not thirdparty_under_base:
                self._include_paths.append( env['thirdparty'] )

        self._ignored_types = ignored_types( env )

        cuppa.progress.NotifyProgress.register_callback( None, self.on_progress )

        logger.debug( "Including Paths Under    = {}".format( as_notice( str( self._include_paths ) ) ) )
        logger.debug( "Excluding Paths Starting = {}".format( as_notice( str( self._exclude_paths ) ) ) )
Example No. 38
    def location_id(cls, env):
        location = env.get_option(cls._name + "-location")
        branch = env.get_option(cls._name + "-branch")

        if not location and branch:
            location = env['branch_root']
        if not location and branch:
            location = env['thirdparty']
        if not location:
            logger.debug(
                "No location specified for dependency [{}]. Dependency not available."
                .format(cls._name.title()))
            return None

        return (location, branch)
Example No. 39
 def _set_qt4_dir( self, env ):
     command = "pkg-config --cflags QtCore"
     try:
         cflags = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip()
         if cflags:
             flags = env.ParseFlags( cflags )
             if 'CPPPATH' in flags:
                 shortest_path = flags['CPPPATH'][0]
                 for include in flags['CPPPATH']:
                     if len(include) < len(shortest_path):
                         shortest_path = include
                 env['QT4DIR'] = shortest_path
             logger.debug( "Q4DIR detected as [{}]".format( as_info( env['QT4DIR'] ) ) )
     except:
         logger.debug( "In _set_qt4_dir() failed to execute [{}]".format( command ) )
Example No. 40
def lazy_update_library_list( env, emitting, libraries, built_libraries, add_dependents, linktype, boost, stage_dir ):

    if add_dependents:
        if not emitting:
            libraries = set( build_with_library_name(l) for l in add_dependent_libraries( boost, linktype, libraries ) )
        else:
            libraries = add_dependent_libraries( boost, linktype, libraries )

    if stage_dir not in built_libraries:
        logger.debug( "Lazy update libraries list for [{}] to [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        built_libraries[ stage_dir ] = set( libraries )
    else:
        logger.debug( "Lazy read libraries list for [{}]: libraries are [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        libraries = [ l for l in libraries if l not in built_libraries[ stage_dir ] ]

    return libraries
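
The stage_dir bookkeeping above reduces to a small pattern: the first request for a stage directory records and returns every library, while later requests return only the libraries that have not been recorded yet. A minimal sketch with hypothetical names (not cuppa code) follows.

built_libraries = {}

def lazy_update( libraries, built_libraries, stage_dir ):
    if stage_dir not in built_libraries:
        built_libraries[ stage_dir ] = set( libraries )   # first request: build everything
        return libraries
    return [ l for l in libraries if l not in built_libraries[ stage_dir ] ]

print( lazy_update( [ "system", "thread" ], built_libraries, "stage" ) )   # ['system', 'thread']
print( lazy_update( [ "system", "chrono" ], built_libraries, "stage" ) )   # ['chrono']
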
Example No. 41
    def __call__( self, target, source, env ):

        build_script_path = os.path.join( self._location, 'tools', 'build' )

        if self._version < 1.47:
            build_script_path = os.path.join( build_script_path, 'src', 'v2', 'engine' )

        elif self._version > 1.55:
            build_script_path = os.path.join( build_script_path, 'src', 'engine' )

        else:
            build_script_path = os.path.join( build_script_path, 'v2', 'engine' )

        bjam_build_script = './build.sh'
        if platform.system() == "Windows":
            bjam_build_script = os.path.join( build_script_path, 'build.bat' )

        logger.debug( "Execute [{}] from [{}]".format(
                bjam_build_script,
                str(build_script_path)
        ) )

        process_bjam_build = ProcessBjamBuild()

        try:
            IncrementalSubProcess.Popen(
                process_bjam_build,
                [ bjam_build_script ],
                cwd=build_script_path
            )

            bjam_exe_path = process_bjam_build.exe_path()

            if not bjam_exe_path:
                logger.critical( "Could not determine bjam exe path" )
                return 1

            bjam_binary_path = os.path.join( build_script_path, bjam_exe_path )

            shutil.copy( bjam_binary_path, target[0].path )

        except OSError as error:
            logger.critical( "Error building bjam [{}]".format( str( error.args ) ) )
            return 1

        return None
Example No. 42
 def make_rev_options( vc_type, vcs_backend, url, rev, local_remote ):
     logger.debug( "vc_type={vc_type}, url={url}, rev={rev}, local_remote={local_remote}".format(
         vc_type = as_info( str(vc_type) ),
         url = as_notice( str(url) ),
         rev = as_notice( str(rev) ),
         local_remote = as_notice( str(local_remote) )
     ) )
     if vc_type == 'git':
         if rev:
             return vcs_backend.make_rev_options( rev=rev )
         #elif local_remote:
             #return vcs_backend.make_rev_options( rev=local_remote )
     elif vc_type == 'hg' and rev:
         return vcs_backend.make_rev_options( rev=rev )
     elif vc_type == 'bzr' and rev:
         return vcs_backend.make_rev_options( rev=rev )
     return vcs_backend.make_rev_options()
Example No. 43
 def remove_common_top_directory_under(cls, path):
     dirs = os.listdir(path)
     if not dirs:
         raise LocationException(
             "Uncompressed archive [{}] is empty".format(path))
     top_dir = os.path.join(path, dirs[0])
     if len(dirs) == 1 and os.path.isdir(top_dir):
         logger.debug(
             "Removing redundant top directory [{}] from [{}]".format(
                 as_info(dirs[0]), as_info(path)))
         # we have a single top-level directory
         move_dirs = os.listdir(top_dir)
         for d in move_dirs:
             shutil.move(os.path.join(top_dir, d), os.path.join(path, d))
         shutil.rmtree(top_dir)
         return True
     return False
Example No. 44
 def remove_common_top_directory_under( self, path ):
     dirs = os.listdir( path )
     if not dirs:
         raise LocationException( "Uncompressed archive [{}] is empty".format( path ) )
     top_dir = os.path.join( path, dirs[0] )
     if len(dirs) == 1 and os.path.isdir( top_dir ):
         logger.debug( "Removing redundant top directory [{}] from [{}]".format(
                 as_info( dirs[0] ),
                 as_info( path ) )
         )
         # we have a single top-level directory
         move_dirs = os.listdir( top_dir )
         for d in move_dirs:
             shutil.move( os.path.join( top_dir, d ), os.path.join( path, d ) )
         shutil.rmtree( top_dir )
         return True
     return False
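
To make the effect concrete, here is a throwaway demonstration (not cuppa code, with purely illustrative directory names) of flattening a single top-level directory left behind by an extracted archive.

import os
import shutil
import tempfile

path = tempfile.mkdtemp()
os.makedirs( os.path.join( path, "boost_1_71_0", "libs" ) )

dirs = os.listdir( path )                     # ['boost_1_71_0']
top_dir = os.path.join( path, dirs[0] )
if len(dirs) == 1 and os.path.isdir( top_dir ):
    for d in os.listdir( top_dir ):
        shutil.move( os.path.join( top_dir, d ), os.path.join( path, d ) )
    shutil.rmtree( top_dir )

print( os.listdir( path ) )                   # ['libs'] - contents promoted one level
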
Example No. 45
 def make_rev_options(vc_type, vcs_backend, url, rev, local_remote):
     logger.debug(
         "vc_type={vc_type}, url={url}, rev={rev}, local_remote={local_remote}"
         .format(vc_type=as_info(str(vc_type)),
                 url=as_notice(str(url)),
                 rev=as_notice(str(rev)),
                 local_remote=as_notice(str(local_remote))))
     if vc_type == 'git':
         if rev:
             return vcs_backend.make_rev_options(rev=rev)
         #elif local_remote:
         #return vcs_backend.make_rev_options( rev=local_remote )
     elif vc_type == 'hg' and rev:
         return vcs_backend.make_rev_options(rev=rev)
     elif vc_type == 'bzr' and rev:
         return vcs_backend.make_rev_options(rev=rev)
     return vcs_backend.make_rev_options()
Example No. 46
    def extract( cls, filename, target_dir ):
        os.makedirs( target_dir )
        if tarfile.is_tarfile( filename ):
            logger.debug( "Extracting [{}] into [{}]".format( as_info( filename ), as_info( target_dir ) ) )
            try:
                with tarfile.TarFile( filename ) as tf:
                    tf.extractall( target_dir )
            except tarfile.ReadError:
                command = "tar -xf {filename}".format( filename=filename )
                if subprocess.call( shlex.split( command ), cwd=target_dir ) != 0:
                    raise LocationException( "Could not untar downloaded file from [{}]".format( filename ) )

        if zipfile.is_zipfile( filename ):
            logger.debug( "Extracting [{}] into [{}]".format( as_info( filename ), as_info( target_dir ) ) )
            with zipfile.ZipFile( filename ) as zf:
                zf.extractall( target_dir )

        while cls.remove_common_top_directory_under( target_dir ):
            pass
Example No. 47
    def extract( self, filename, target_dir ):
        os.makedirs( target_dir )
        if tarfile.is_tarfile( filename ):
            logger.debug( "Extracting [{}] into [{}]".format( as_info( filename ), as_info( target_dir ) ) )
            try:
                with tarfile.TarFile( filename ) as tf:
                    tf.extractall( target_dir )
            except tarfile.ReadError:
                command = "tar -xf {filename}".format( filename=filename )
                if subprocess.call( shlex.split( command ), cwd=target_dir ) != 0:
                    raise LocationException( "Could not untar downloaded file from [{}]".format( filename ) )

        if zipfile.is_zipfile( filename ):
            logger.debug( "Extracting [{}] into [{}]".format( as_info( filename ), as_info( target_dir ) ) )
            with zipfile.ZipFile( filename ) as zf:
                zf.extractall( target_dir )

        while self.remove_common_top_directory_under( target_dir ):
            pass
Example No. 48
 def location_from_boost_version( cls, location ):
     if location == "latest" or location == "current":
         location = determine_latest_boost_verion()
     if location:
         match = re.match( r'(boost_)?(?P<version>\d[._]\d\d(?P<minor>[._]\d)?)', location )
         if match:
             version = match.group('version')
             if not match.group('minor'):
                 version += "_0"
             logger.debug( "Only boost version specified, retrieve from SourceForge if not already cached" )
             extension = ".tar.gz"
             if cuppa.build_platform.name() == "Windows":
                 extension = ".zip"
             return "http://sourceforge.net/projects/boost/files/boost/{numeric_version}/boost_{string_version}{extension}/download".format(
                         numeric_version = version.translate( string.maketrans( '._', '..' ) ),
                         string_version = version.translate( string.maketrans( '._', '__' ) ),
                         extension = extension
                     )
     return location
Example No. 49
def _location_from_boost_version( location, offline ):
    if location == "latest" or location == "current":
        location = _determine_latest_boost_verion( offline )
    if location:
        match = re.match( r'(boost_)?(?P<version>\d[._]\d\d(?P<minor>[._]\d)?)', location )
        if match:
            version = match.group('version')
            if not match.group('minor'):
                version += "_0"
            logger.debug( "Only boost version specified, retrieve from SourceForge if not already cached" )
            extension = ".tar.gz"
            if cuppa.build_platform.name() == "Windows":
                extension = ".zip"
            return "http://sourceforge.net/projects/boost/files/boost/{numeric_version}/boost_{string_version}{extension}/download".format(
                        numeric_version = version.translate( string.maketrans( '._', '..' ) ),
                        string_version = version.translate( string.maketrans( '._', '__' ) ),
                        extension = extension
                    )
    return location
Example No. 50
 def update_from_repository( self, location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend ):
     url, repository, branch, remote, revision = self.get_info( location, local_dir_with_sub_dir, full_url, vc_type )
     rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
     version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
     logger.info( "Updating [{}] in [{}]{} at [{}]".format(
             as_info( location ),
             as_notice( local_dir_with_sub_dir ),
             ( rev_options and  " on {}".format( as_notice( str(rev_options) ) ) or "" ),
             as_info( version )
     ) )
     try:
         update( vcs_backend, local_dir_with_sub_dir, rev_options )
         logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
     except pip_exceptions.PipError as error:
         logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                 as_warning( location ),
                 as_warning( local_dir_with_sub_dir ),
                 ( rev_options and  " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                 as_warning( str(error) )
         ) )
Example No. 51
    def _get_location( cls, env ):

        import SCons.Errors

        location_id = cls.location_id( env )
        if not location_id:
            return None
        if location_id not in cls._cached_locations:
            location = location_id[0]
            develop = location_id[1]
            branch = location_id[2]
            use_develop = location_id[3]
            try:
                cls._cached_locations[location_id] = cuppa.location.Location( env, location, develop=develop, branch=branch, extra_sub_path=cls._extra_sub_path )
                logger.debug( "Adding location [{}]({}) to cached locations".format(
                        as_notice( cls._name.title() ),
                        as_notice( str(location_id) )
                ) )
            except cuppa.location.LocationException as error:
                logger.error(
                        "Could not get location for [{}] at [{}] (and develop [{}], use=[{}]) with branch [{}] and extra sub path [{}]. Failed with error [{}]"
                        .format(
                                as_notice( cls._name.title() ),
                                as_info( str(location) ),
                                as_info( str(develop) ),
                                as_notice( str(use_develop and True or False) ),
                                as_notice( str(branch) ),
                                as_notice( str(cls._extra_sub_path) ),
                                as_error( str(error) )
                        )
                )
                raise SCons.Errors.StopError( error )
        else:
            logger.debug( "Loading location [{}]({}) from cached locations".format(
                    as_notice( cls._name.title() ),
                    as_notice( str(location_id) )
            ) )

        return cls._cached_locations[location_id]
Example No. 52
    def __init__( self, env ):

        self._version = "4"

        if cuppa.build_platform.name() in ["Darwin", "Linux"]:
            if cuppa.output_processor.command_available( "pkg-config" ):
                if 'QT4DIR' not in env:
                    self._set_qt4_dir( env )
                self._version = self._get_qt4_version()

        elif cuppa.build_platform.name() == "Windows":
            if 'QT4DIR' not in env:
                paths = glob.glob( 'C:\\Qt\\4.*\\*' )
                if len(paths):
                    paths.sort()
                    env['QT4DIR'] = paths[-1]

        if 'QT4DIR' not in env:
            logger.error( "could not detect QT4 installation" )
            raise Qt4Exception( "could not detect QT4 installation." )

        logger.debug( "Q4DIR detected as [{}]".format( as_info( env['QT4DIR'] ) ) )
Example No. 53
    def add_to_env( cls, env, add_toolchain, add_to_supported ):
        stdlib = None
        suppress_debug_for_auto = None
        try:
            stdlib = env.get_option( 'clang-stdlib' )
            suppress_debug_for_auto = env.get_option( 'clang-disable-debug-for-auto' )
        except:
            pass

        for version in cls.supported_versions():
            add_to_supported( version )

        for version, clang in cls.available_versions().iteritems():
            logger.debug(
                    "Adding toolchain [{}] reported as [{}] with cxx_version [clang++{}] at [{}]".format(
                    as_info(version),
                    as_info(clang['version']),
                    as_info(clang['cxx_version']),
                    as_notice(clang['path'])
            ) )
            add_toolchain(
                    version,
                    cls( version, clang['cxx_version'], clang['version'], clang['path'], stdlib, suppress_debug_for_auto )
            )