def __init__( self, env ):
        self._flags = {}
        self._flags['INCPATH'] = [ os.path.join( self._location.local(), "include" ) ]

        pg_config = "pg_config"
        if platform.system() == "Windows":
            pg_config = pg_config + ".exe"
            if not cuppa.output_processor.command_available( pg_config ):
                # try to find the PostgreSQL install
                program_files = os.environ.get( "ProgramW6432", "" )
                postgresql_base = os.path.join( program_files, "PostgreSQL" )
                if program_files and os.path.exists( postgresql_base ):
                    paths = glob.glob( postgresql_base + '\\*' )
                    if len(paths):
                        paths.sort()
                        latest = paths[-1]
                        pg_config = '\"' + os.path.join( latest, "bin", pg_config ) + '\"'

        if cuppa.output_processor.command_available( pg_config ):
            command = "{pg_config} --includedir".format( pg_config = pg_config )
            libpq_include = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip()
            self._flags['INCPATH'].append( libpq_include )

            command = "{pg_config} --libdir".format( pg_config = pg_config )
            libpq_libpath = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip()
            self._flags['LIBPATH'] = [ libpq_libpath ]
        else:
            logger.warn( "postgresql: pg_config not available so cannot determine LIBPATH for postgres libraries" )
            self._flags['LIBPATH'] = []

        self._flags['DYNAMICLIBS'] = [ 'pq' ]

        self._src_path = os.path.join( self._location.local(), "src" )

        env.AddMethod( QuincePostgresqlLibraryMethod( self._location.local(), self._src_path ), "QuincePostgresqlLibrary" )
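
The constructor above shells out to pg_config to discover the libpq include and library directories. A minimal standalone sketch of that probing step (the helper name pg_config_value is illustrative, and it assumes pg_config is on the PATH):

import shlex
import subprocess

def pg_config_value( flag, pg_config="pg_config" ):
    # query pg_config for a single value, e.g. "--includedir" or "--libdir";
    # returns the reported path as text, or None if pg_config is missing or fails
    try:
        output = subprocess.check_output( shlex.split( "{} {}".format( pg_config, flag ) ),
                                          stderr=subprocess.STDOUT )
        return output.decode( "utf-8" ).strip()
    except ( OSError, subprocess.CalledProcessError ):
        return None
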
Example #2
def info( path ):
    if not path:
        raise SubversionException("No working copy path specified for calling svnversion with.")

    url        = None
    repository = None
    branch     = None
    revision   = None

    try:
        command = "svn info {}".format( path )
        svn_info = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT )
        url        = re.search( r'URL: ([^\s]+)', svn_info ).expand(r'\1')
        repository = re.search( r'Repository Root: ([^\s]+)', svn_info ).expand(r'\1')
        branch     = re.search( r'Relative URL: \^/([^\s]+)', svn_info ).expand(r'\1')
        revision   = re.search( r'Revision: (\d+)', svn_info ).expand(r'\1')
    except subprocess.CalledProcessError:
        raise SubversionException("Not a Subversion working copy")

    try:
        command = "svnversion -n {}".format( path )
        revision = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT )
    except subprocess.CalledProcessError:
        pass
    except OSError:
        logger.warn( "The {} binary is not available. Consider installing it.".format( as_warning("svnversion") ) )

    return url, repository, branch, revision
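
A usage sketch for the function above; the path argument is illustrative:

url, repository, branch, revision = info( "." )   # assumes "." is a Subversion working copy
print( "url={} repo={} branch={} rev={}".format( url, repository, branch, revision ) )
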
Example #3
    def __call__(self, env, libraries):
        if not self._add_dependents:
            logger.warn(
                "BoostSharedLibrary() is deprecated, use BoostSharedLibs() or BoostSharedLib() instead"
            )
        libraries = Flatten([libraries])

        if not 'boost' in env['BUILD_WITH']:
            env.BuildWith('boost')
        Boost = env['dependencies']['boost'](env)

        for library in libraries:
            if library.startswith('log'):
                env.AppendUnique(CPPDEFINES='BOOST_LOG_DYN_LINK')
            elif library == 'chrono':
                env.AppendUnique(CPPDEFINES='BOOST_CHRONO_DYN_LINK')
            elif library == 'filesystem':
                env.AppendUnique(CPPDEFINES='BOOST_FILESYSTEM_DYN_LINK')
            elif library == 'date_time':
                env.AppendUnique(CPPDEFINES='BOOST_DATE_TIME_DYN_LINK')
            elif library == 'regex':
                env.AppendUnique(CPPDEFINES='BOOST_REGEX_DYN_LINK')
            elif library == 'system':
                env.AppendUnique(CPPDEFINES='BOOST_SYSTEM_DYN_LINK')

        library = BoostLibraryBuilder(Boost,
                                      add_dependents=self._add_dependents,
                                      verbose_build=self._verbose_build,
                                      verbose_config=self._verbose_config)(
                                          env, None, None, libraries, 'shared')
        if self._build_always:
            return AlwaysBuild(library)
        else:
            return library
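
Per the deprecation warning, call sites should prefer BoostSharedLibs() or BoostSharedLib(). A sketch of a sconscript call site, assuming the callable is registered as an environment method (as cuppa does with AddMethod elsewhere) and using an illustrative library list:

# inside a cuppa sconscript; env is provided by cuppa
boost_libs = env.BoostSharedLibs(['system', 'filesystem', 'log'])
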
Example #4
    def get_branch(cls, path):
        branch = None
        remote = None

        # In case we have a detached head we use this
        result = as_str(
            cls.execute_command(
                "{git} show -s --pretty=\%d HEAD".format(git=cls.binary()),
                path))
        match = re.search(r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result)
        if match:
            branches = [b.strip() for b in match.group("branches").split(',')]
            logger.trace("Branches (using show) for [{}] are [{}]".format(
                as_notice(path), colour_items(branches)))
            if len(branches) == 1:
                # If this returns "tag: tag_name", replace the ": " with "/" and then extract the tag_name;
                # otherwise this simply extracts the branch_name as expected
                if not branches[0].startswith('tag:'):
                    remote = branches[0]
                branch = branches[0].replace(': ', '/').split('/')[1]
            else:
                remote = branches[-2]
                branch = remote.split('/')[1]
            logger.trace("Branch (using show) for [{}] is [{}]".format(
                as_notice(path), as_info(branch)))
        else:
            logger.warn("No branch found from [{}]".format(result))

        return branch, remote
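
The regex above can be exercised directly against a sample decorated HEAD line; the branch names below are illustrative:

import re

result = " (HEAD detached at 1a2b3c4, origin/develop, develop)"
match = re.search(r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result)
branches = [b.strip() for b in match.group("branches").split(',')]
# branches == ['origin/develop', 'develop'], so remote == 'origin/develop'
# and branch == 'develop'
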
Example #5
 def retrieve_tool( cls, env ):
     url = "hg+https://bitbucket.org/dirkbaechle/scons_qt4"
     try:
         return cuppa.location.Location( env, url, extra_sub_path = "qt4" )
     except cuppa.location.LocationException:
         logger.warn( "Could not retrieve scons_qt4 from [{}]".format( url ) )
     return None
Example #6
def _determine_latest_boost_verion(offline):
    current_release = "1.71.0"
    if not offline:
        try:
            boost_version_url = 'https://www.boost.org/users/download/'
            logger.info("Checking current boost version from {}...".format(
                as_info(boost_version_url)))
            html = lxml.html.parse(urlopen(boost_version_url))

            current_release = html.xpath(
                "/html/body/div[2]/div/div[1]/div/div/div[2]/h3[1]/span"
            )[0].text
            current_release = str(
                re.search(r'(\d[.]\d+([.]\d+)?)', current_release).group(1))

            logger.info("Latest boost release detected as [{}]".format(
                as_info(current_release)))

        except Exception as e:
            logger.warn(
                "Cannot determine latest version of boost - [{}]. Assuming [{}]."
                .format(str(e), current_release))
    else:
        logger.info(
            "In offline mode. No version of boost specified so assuming [{}]".
            format(as_info(current_release)))

    return current_release
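
The version-extraction regex accepts both two- and three-component version strings; a quick illustrative check:

import re

for text in ("Version 1.71.0", "Version 1.71"):
    print(re.search(r'(\d[.]\d+([.]\d+)?)', text).group(1))
# prints 1.71.0, then 1.71
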
Example #7
    def get_branch(cls, path):
        branch = None
        try:
            result = cls.execute_command(
                "{git} symbolic-ref HEAD".format(git=cls.binary()), path)
            branch = result.replace("refs/heads/", "").strip()
            logger.trace("Branch (using symbolic-ref) for [{}] is [{}]".format(
                as_notice(path), as_info(branch)))
            return branch
        except cls.Error:
            pass

        # In case we have a detached head we can fallback to this
        result = cls.execute_command(
            "{git} show -s --pretty=\%d HEAD".format(git=cls.binary()), path)
        match = re.search(r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result)
        if match:
            branches = [b.strip() for b in match.group("branches").split(',')]
            logger.trace("Branches (using show) for [{}] are [{}]".format(
                as_notice(path), colour_items(branches)))
            if len(branches) == 1:
                # If this returns "tag: tag_name", replace the ": " with "/" and then extract the tag_name;
                # otherwise this simply extracts the branch_name as expected
                branch = branches[0].replace(': ', '/').split('/')[1]
            else:
                branch = branches[-2].split('/')[1]
            logger.trace("Branch (using show) for [{}] is [{}]".format(
                as_notice(path), as_info(branch)))
        else:
            logger.warn("No branch found from [{}]".format(result))

        return branch
Example #8
def info(path):
    if not path:
        raise SubversionException(
            "No working copy path specified for calling svnversion.")

    url = None
    repository = None
    branch = None
    revision = None

    try:
        command = "svn info {}".format(path)
        svn_info = subprocess.check_output(shlex.split(command),
                                           stderr=subprocess.STDOUT)
        url = re.search(r'URL: ([^\s]+)', svn_info).expand(r'\1')
        repository = re.search(r'Repository Root: ([^\s]+)',
                               svn_info).expand(r'\1')
        branch = re.search(r'Relative URL: \^/([^\s]+)',
                           svn_info).expand(r'\1')
        revision = re.search(r'Revision: (\d+)', svn_info).expand(r'\1')
    except subprocess.CalledProcessError:
        raise SubversionException("Not a Subversion working copy")

    try:
        command = "svnversion -n {}".format(path)
        revision = subprocess.check_output(shlex.split(command),
                                           stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError:
        pass
    except OSError:
        logger.warn(
            "The {} binary is not available. Consider installing it.".format(
                as_warning("svnversion")))

    return url, repository, branch, revision
Example #9
    def __call__( self, env, libraries ):
        if not self._add_dependents:
            logger.warn( "BoostSharedLibrary() is deprecated, use BoostSharedLibs() or BoostSharedLib() instead" )
        libraries = Flatten( [ libraries ] )

        if not 'boost' in env['BUILD_WITH']:
            env.BuildWith( 'boost' )
        Boost = env['dependencies']['boost']( env )

        for library in libraries:
            if library.startswith('log'):
                env.AppendUnique( CPPDEFINES = 'BOOST_LOG_DYN_LINK' )
            elif library == 'chrono':
                env.AppendUnique( CPPDEFINES = 'BOOST_CHRONO_DYN_LINK' )
            elif library == 'filesystem':
                env.AppendUnique( CPPDEFINES = 'BOOST_FILESYSTEM_DYN_LINK' )
            elif library == 'date_time':
                env.AppendUnique( CPPDEFINES = 'BOOST_DATE_TIME_DYN_LINK' )
            elif library == 'regex':
                env.AppendUnique( CPPDEFINES = 'BOOST_REGEX_DYN_LINK' )
            elif library == 'system':
                env.AppendUnique( CPPDEFINES = 'BOOST_SYSTEM_DYN_LINK' )

        library = BoostLibraryBuilder(
                Boost,
                add_dependents = self._add_dependents,
                verbose_build  = self._verbose_build,
                verbose_config = self._verbose_config )( env, None, None, libraries, 'shared' )
        if self._build_always:
            return AlwaysBuild( library )
        else:
            return library
Example #10
    def build_library_from_source( self, env, sources=None, library_name=None, linktype=None ):

        from SCons.Script import Flatten

        if not self._source_path and not sources:
            logger.warn( "Attempting to build library when source path is None" )
            return None

        if not library_name:
            library_name = self._name

        if not linktype:
            linktype = self._linktype

        variant_key = env['tool_variant_dir']

        prebuilt_objects   = self.lazy_create_node( variant_key, self._prebuilt_objects )
        prebuilt_libraries = self.lazy_create_node( variant_key, self._prebuilt_libraries )

        local_dir = self._location.local()
        local_folder = self._location.local_folder()

        build_dir = os.path.abspath( os.path.join( env['abs_build_root'], local_folder, env['tool_variant_working_dir'] ) )
        final_dir = os.path.abspath( os.path.normpath( os.path.join( build_dir, env['final_dir'] ) ) )

        logger.debug( "build_dir for [{}] = [{}]".format( as_info(self._name), build_dir ) )
        logger.debug( "final_dir for [{}] = [{}]".format( as_info(self._name), final_dir ) )

        obj_suffix = env['OBJSUFFIX']
        obj_builder = env.StaticObject
        lib_builder = env.BuildStaticLib

        if linktype == "shared":
            obj_suffix = env['SHOBJSUFFIX']
            obj_builder = env.SharedObject
            lib_builder = env.BuildSharedLib

        if not sources:
            sources = env.RecursiveGlob( "*.cpp", start=self._source_path, exclude_dirs=[ env['build_dir'] ] )
            sources.extend( env.RecursiveGlob( "*.cc", start=self._source_path, exclude_dirs=[ env['build_dir'] ] ) )
            sources.extend( env.RecursiveGlob( "*.c", start=self._source_path, exclude_dirs=[ env['build_dir'] ] ) )

        objects = []
        for source in Flatten( [sources] ):
            rel_path = os.path.relpath( str(source), local_dir )
            rel_obj_path = os.path.splitext( rel_path )[0] + obj_suffix
            obj_path = os.path.join( build_dir, rel_obj_path )
            if not rel_obj_path in prebuilt_objects:
                prebuilt_objects[rel_obj_path] = obj_builder( obj_path, source )
            objects.append( prebuilt_objects[rel_obj_path] )

        if not linktype in prebuilt_libraries:
            library = lib_builder( library_name, objects, final_dir = final_dir )
            if linktype == "shared":
                library = env.Install( env['abs_final_dir'], library )
            prebuilt_libraries[linktype] = library
        else:
            logger.trace( "using existing library = [{}]".format( str(prebuilt_libraries[linktype]) ) )

        return prebuilt_libraries[linktype]
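
lazy_create_node() is not shown here; from the way it is used, it appears to return a per-variant cache dictionary, creating it on first access. A minimal sketch of that assumed behaviour (not the actual cuppa implementation):

def lazy_create_node( variant_key, cache ):
    # return the per-variant sub-dict, creating it on first access
    return cache.setdefault( variant_key, {} )
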
Example #11
File: git.py Project: j0nnyw/cuppa
    def get_branch(cls, path):
        branch = None
        remote = None

        # In case we have a detached head we use this
        result = cls.execute_command(
            "{git} show -s --pretty=\%d --decorate=full HEAD".format(
                git=cls.binary()), path)

        match = re.search(r'HEAD(?:(?:[^ ]* -> |[^,]*, )(?P<refs>[^)]+))?',
                          result)

        if match:
            refs = [{
                "ref": r.strip(),
                "type": ""
            } for r in match.group("refs").split(',')]
            logger.trace("Refs (using show) for [{}] are [{}]".format(
                as_notice(path), colour_items((r["ref"] for r in refs))))
            if refs:
                for ref in refs:
                    if ref["ref"].startswith("refs/heads/"):
                        ref["ref"] = ref["ref"][len("refs/heads/"):]
                        ref["type"] = "L"
                    elif ref["ref"].startswith("refs/tags/"):
                        ref["ref"] = ref["ref"][len("refs/tags/"):]
                        ref["type"] = "T"
                    elif ref["ref"].startswith("refs/remotes/"):
                        ref["ref"] = ref["ref"][len("refs/remotes/"):]
                        ref["type"] = "R"
                    else:
                        ref["type"] = "U"

                logger.trace(
                    "Refs (after classification) for [{}] are [{}]".format(
                        as_notice(path),
                        colour_items(
                            (":".join([r["type"], r["ref"]]) for r in refs))))

                if refs[0]["type"] == "L":
                    branch = refs[0]["ref"]
                elif refs[0]["type"] == "T":
                    branch = refs[0]["ref"]
                elif refs[0]["type"] == "R":
                    branch = refs[0]["ref"].split('/')[1]

                remote = next(
                    (ref["ref"] for ref in refs if ref["type"] == "R"), None)

            logger.trace("Branch (using show) for [{}] is [{}]".format(
                as_notice(path), as_info(str(branch))))
        else:
            logger.warn("No branch found from [{}]".format(result))

        return branch, remote
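
With --decorate=full the refs keep their full prefixes, which is exactly what the classification loop keys on. An illustrative run of the regex and classification:

import re

result = " (HEAD -> refs/heads/develop, refs/remotes/origin/develop)"
match = re.search(r'HEAD(?:(?:[^ ]* -> |[^,]*, )(?P<refs>[^)]+))?', result)
refs = [r.strip() for r in match.group("refs").split(',')]
# refs == ['refs/heads/develop', 'refs/remotes/origin/develop']
# classified as L:develop and R:origin/develop, giving branch == 'develop'
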
Example #12
 def coverage_tool(cls, reported_version):
     gcov = "gcov"
     versioned_gcov = "{gcov}-{version}".format(
         gcov=gcov, version=str(reported_version['major']))
     if cuppa.build_platform.where_is(versioned_gcov):
         return versioned_gcov
     if cuppa.build_platform.where_is(gcov):
         version = cls.version_from_command(gcov, "gcc")
         if version == reported_version:
             return gcov
     logger.warn(
         "Coverage requested for current toolchain but none is available")
     return None
Example #13
 def coverage_tool( cls, cxx_version ):
     llvm_cov = "llvm-cov"
     versioned_llvm_cov = None
     if cxx_version:
         versioned_llvm_cov = "{llvm_cov}-{version}".format( llvm_cov=llvm_cov, version=cxx_version[0] )
         if cuppa.build_platform.where_is( versioned_llvm_cov ):
             return versioned_llvm_cov + " gcov"
     if cuppa.build_platform.where_is( llvm_cov ):
         version = cls.llvm_version_from( llvm_cov )
         if version == cxx_version:
             return llvm_cov + " gcov"
     logger.warn( "Coverage requested for current toolchain but none is available" )
     return None
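
cuppa.build_platform.where_is() is used as a PATH lookup; assuming it behaves like shutil.which, the versioned-tool probe reduces to this sketch (the version tuple is illustrative):

import shutil

cxx_version = ( "14", )   # illustrative toolchain version tuple
versioned = "llvm-cov-{}".format( cxx_version[0] )
tool = shutil.which( versioned ) and versioned + " gcov" or None
# e.g. "llvm-cov-14 gcov" if llvm-cov-14 is on the PATH, otherwise None
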
Example #14
def log_exception( error, suppress=None ):

    from cuppa.log import logger
    from cuppa.colourise import as_info

    if not suppress:
        logger.fatal( "Cuppa terminated by exception [{}: {}]".format(
                    as_info( error.__class__.__name__ ),
                    as_info( str(error) )
        ) )
        if not logger.isEnabledFor( logging.EXCEPTION ):
            logger.warn( "Use {} (or above) to see the stack".format( as_info( "--verbosity=exception" ) ) )
    logger.exception( traceback.format_exc() )
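
A usage sketch for the handler above; run_build is a hypothetical entry point:

try:
    run_build()   # hypothetical; any callable that may raise
except Exception as error:
    log_exception( error )
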
Example #16
def check_current_version():

    installed_version = get_version()
    logger.info("cuppa: version {}".format(as_info(installed_version)))
    try:
        pypi = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
        latest_available = pypi.package_releases('cuppa')[0]
        if parse_version(installed_version) < parse_version(latest_available):
            logger.warn(
                "Newer version [{}] available. Upgrade using \"{}\"\n".format(
                    as_warning(latest_available),
                    as_emphasised("pip install -U cuppa")))
    except:
        pass
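
parse_version() (from pkg_resources/setuptools) gives a genuine version comparison rather than a lexicographic string compare, which matters here:

from pkg_resources import parse_version

assert parse_version("0.9.20") < parse_version("0.10.1")
assert not ("0.9.20" < "0.10.1")   # a plain string compare gets this wrong
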
Example #17
def determine_latest_boost_verion():
    current_release = "1.61.0"
    try:
        html = lxml.html.parse('http://www.boost.org/users/download/')

        current_release = html.xpath("/html/body/div[2]/div/div[1]/div/div/div[2]/h3[1]/span")[0].text
        current_release = str( re.search( r'(\d[.]\d+([.]\d+)?)', current_release ).group(1) )

        logger.debug( "latest boost release detected as [{}]".format( as_info( current_release ) ) )

    except Exception as e:
        logger.warn( "cannot determine latest version of boost - [{}]. Assuming [{}].".format( str(e), current_release ) )

    return current_release
Example #18
 def coverage_tool(cls, cxx_version):
     llvm_cov = "llvm-cov"
     versioned_llvm_cov = None
     if cxx_version:
         versioned_llvm_cov = "{llvm_cov}-{version}".format(
             llvm_cov=llvm_cov, version=cxx_version[0])
         if cuppa.build_platform.where_is(versioned_llvm_cov):
             return versioned_llvm_cov + " gcov"
     if cuppa.build_platform.where_is(llvm_cov):
         version = cls.llvm_version_from(llvm_cov)
         if version == cxx_version:
             return llvm_cov + " gcov"
     logger.warn(
         "Coverage requested for current toolchain but none is available")
     return None
Example #19
    def _libc_version( self, machine, system ):

        libc_file = "libc.so.6"
        libc_path = "/lib/" + libc_file

        if not path.exists( libc_path ):
            multiarch_lib_path = '-'.join( [ machine, system.lower(), 'gnu' ] )
            libc_path = "/lib/" + multiarch_lib_path + "/" + libc_file

        try:
            libc_version = Popen([libc_path], stdout=PIPE).communicate()[0]
            return 'libc' + search( r'^GNU C Library [()a-zA-Z ]*([0-9][.0-9]+)', as_str( libc_version ), MULTILINE ).expand(r'\1').replace('.','')
        except:
            logger.warn( "Could not detect the version of libc installed. You might be missing some development libraries!" )
            return None
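
Executing libc.so.6 directly prints a banner whose first line names the glibc version, which the regex then reduces to a compact tag. A sketch against an illustrative banner line:

from re import search, MULTILINE

banner = "GNU C Library (Ubuntu GLIBC 2.31-0ubuntu9.9) stable release version 2.31."
version = search( r'^GNU C Library [()a-zA-Z ]*([0-9][.0-9]+)', banner, MULTILINE ).expand( r'\1' )
print( 'libc' + version.replace( '.', '' ) )   # libc231
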
Example #20
def check_current_version( offline ):

    installed_version = get_version()
    logger.info( "cuppa: version {}".format( as_info( installed_version ) ) )
    if not offline:
        try:
            pypi = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
            latest_available = pypi.package_releases('cuppa')[0]
            if parse_version( installed_version ) < parse_version( latest_available ):
                logger.warn( "Newer version [{}] available. Upgrade using \"{}\"\n".format(
                        as_warning( latest_available ),
                        as_emphasised( "pip install -U cuppa" )
                ) )
        except:
            pass
Example #21
    def __init__(self, env):
        self._flags = {}
        self._flags['INCPATH'] = [
            os.path.join(self._location.local(), "include")
        ]

        pg_config = "pg_config"
        if platform.system() == "Windows":
            pg_config = pg_config + ".exe"
            if not cuppa.output_processor.command_available(pg_config):
                # try to find the PostgreSQL install
                program_files = os.environ.get("ProgramW6432", "")
                postgresql_base = os.path.join(program_files, "PostgreSQL")
                if program_files and os.path.exists(postgresql_base):
                    paths = glob.glob(postgresql_base + '\\*')
                    if len(paths):
                        paths.sort()
                        latest = paths[-1]
                        pg_config = '\"' + os.path.join(
                            latest, "bin", pg_config) + '\"'

        if cuppa.output_processor.command_available(pg_config):
            command = "{pg_config} --includedir".format(pg_config=pg_config)
            libpq_include = subprocess.check_output(
                shlex.split(command), stderr=subprocess.STDOUT).strip()
            self._flags['INCPATH'].append(libpq_include)

            command = "{pg_config} --libdir".format(pg_config=pg_config)
            libpq_libpath = subprocess.check_output(
                shlex.split(command), stderr=subprocess.STDOUT).strip()
            self._flags['LIBPATH'] = [libpq_libpath]
        else:
            logger.warn(
                "postgresql: pg_config not available so cannot determine LIBPATH for postgres libraries"
            )
            self._flags['LIBPATH'] = []

        self._flags['DYNAMICLIBS'] = ['pq']

        self._src_path = os.path.join(self._location.local(), "src")

        env.AddMethod(
            QuincePostgresqlLibraryMethod(self._location.local(),
                                          self._src_path),
            "QuincePostgresqlLibrary")
Example #22
def _determine_latest_boost_verion( offline ):
    current_release = "1.69.0"
    if not offline:
        try:
            boost_version_url = 'https://www.boost.org/users/download/'
            logger.info( "Checking current boost version from {}...".format( as_info( boost_version_url ) ) )
            html = lxml.html.parse( urllib2.urlopen( boost_version_url ) )

            current_release = html.xpath("/html/body/div[2]/div/div[1]/div/div/div[2]/h3[1]/span")[0].text
            current_release = str( re.search( r'(\d[.]\d+([.]\d+)?)', current_release ).group(1) )

            logger.info( "Latest boost release detected as [{}]".format( as_info( current_release ) ) )

        except Exception as e:
            logger.warn( "Cannot determine latest version of boost - [{}]. Assuming [{}].".format( str(e), current_release ) )
    else:
        logger.info( "In offline mode. No version of boost specified so assuming [{}]".format( as_info( current_release ) ) )

    return current_release
Example #23
 def update_from_repository( self, location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend ):
     url, repository, branch, remote, revision = self.get_info( location, local_dir_with_sub_dir, full_url, vc_type )
     rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
     version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
     logger.info( "Updating [{}] in [{}]{} at [{}]".format(
             as_info( location ),
             as_notice( local_dir_with_sub_dir ),
             ( rev_options and  " on {}".format( as_notice( str(rev_options) ) ) or "" ),
             as_info( version )
     ) )
     try:
         update( vcs_backend, local_dir_with_sub_dir, rev_options )
         logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
     except pip_exceptions.PipError as error:
         logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                 as_warning( location ),
                 as_warning( local_dir_with_sub_dir ),
                 ( rev_options and  " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                 as_warning( str(error) )
         ) )
Example #24
    def __call__(self, option, opt, value, parser):
        toolchains = set()
        requested = value.split(',')
        for toolchain in requested:
            supported = fnmatch.filter( self._supported, toolchain )

            if not supported:
                logger.warn( "Requested toolchain [{}] does not match any supported, skipping".format( as_info(toolchain) ) )
            else:
                available = fnmatch.filter( self._available, toolchain )

                if not available:
                    logger.warn( "Requested toolchain [{}] does not match any available, skipping".format( as_info(toolchain) ) )
                else:
                    toolchains.update( available )

        if not toolchains:
            logger.error( "None of the requested toolchains are available" )

        parser.values.toolchains = list(toolchains)
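
fnmatch.filter() lets a requested toolchain act as a shell-style glob over the supported and available sets; for example:

import fnmatch

available = [ 'gcc7', 'gcc9', 'clang10' ]
print( fnmatch.filter( available, 'gcc*' ) )   # ['gcc7', 'gcc9']
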
Example #25
    def __call__( self, env, libraries ):

        if not self._add_dependents:
            logger.warn( "BoostStaticLibrary() is deprecated, use BoostStaticLibs() or BoostStaticLib() instead" )
        libraries = Flatten( [ libraries ] )

        if not 'boost' in env['BUILD_WITH']:
            env.BuildWith( 'boost' )
        Boost = env['dependencies']['boost']( env )

        logger.trace( "Build static libraries [{}]".format( colour_items( libraries ) ) )

        library = BoostLibraryBuilder(
                Boost,
                add_dependents = self._add_dependents,
                verbose_build  = self._verbose_build,
                verbose_config = self._verbose_config )( env, None, None, libraries, 'static' )
        if self._build_always:
            return AlwaysBuild( library )
        else:
            return library
Example #26
File: git.py Project: ja11sop/cuppa
    def get_branch( cls, path ):
        branch = None
        remote = None

        # In case we have a detached head we use this
        result = cls.execute_command( "{git} show -s --pretty=\%d HEAD".format( git=cls.binary() ), path )
        match = re.search( r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result )
        if match:
            branches = [ b.strip() for b in match.group("branches").split(',') ]
            logger.trace( "Branches (using show) for [{}] are [{}]".format( as_notice(path), colour_items(branches) ) )
            if len(branches) == 1:
                # If this returns "tag: tag_name", replace the ": " with "/" and then extract the tag_name;
                # otherwise this simply extracts the branch_name as expected
                if not branches[0].startswith('tag:'):
                    remote = branches[0]
                branch = branches[0].replace(': ','/').split('/')[1]
            else:
                remote = branches[-2]
                branch = remote.split('/')[1]
            logger.trace( "Branch (using show) for [{}] is [{}]".format( as_notice(path), as_info(branch) ) )
        else:
            logger.warn( "No branch found from [{}]".format( result ) )

        return branch, remote
Example #27
    def build(self, cuppa_env):

        #        cuppa.progress.NotifyProgress.register_callback( None, self.on_progress )

        cuppa_env['empty_env'] = cuppa_env.create_env()
        projects = cuppa_env.get_option('projects')
        toolchains = cuppa_env['active_toolchains']

        if not projects:
            projects = cuppa_env['default_projects']

            if not projects or not cuppa_env['run_from_launch_dir']:
                sub_sconscripts = self.get_sub_sconscripts(
                    cuppa_env['launch_dir'],
                    [cuppa_env['build_root'], cuppa_env['download_root']])
                if sub_sconscripts:
                    projects = sub_sconscripts
                    logger.info("Using sub-sconscripts [{}]".format(
                        colour_items(projects)))
            elif projects:
                logger.info("Using default_projects [{}]".format(
                    colour_items(projects)))

        if projects:

            sconscripts = []

            for project in projects:

                if (not os.path.exists(project)
                        and not cuppa_env['run_from_launch_dir']
                        and not os.path.isabs(project)):

                    path = os.path.join(cuppa_env['launch_dir'], project)

                    if os.path.exists(path):
                        if os.path.isdir(path):
                            sub_sconscripts = self.get_sub_sconscripts(
                                project, [
                                    cuppa_env['build_root'],
                                    cuppa_env['download_root']
                                ])
                            if sub_sconscripts:
                                logger.info(
                                    "Reading project folder [{}] and using sub-sconscripts [{}]"
                                    .format(project,
                                            colour_items(sub_sconscripts)))
                                sconscripts.extend(sub_sconscripts)
                        else:
                            sconscripts.append(path)

                elif os.path.exists(project) and os.path.isdir(project):
                    sub_sconscripts = self.get_sub_sconscripts(
                        project,
                        [cuppa_env['build_root'], cuppa_env['download_root']])
                    if sub_sconscripts:
                        logger.info(
                            "Reading project folder [{}] and using sub-sconscripts [{}]"
                            .format(project, colour_items(sub_sconscripts)))
                        sconscripts.extend(sub_sconscripts)
                else:
                    sconscripts.append(project)

            for toolchain in toolchains:
                build_envs = self.create_build_envs(toolchain, cuppa_env)
                for build_env in build_envs:
                    for sconscript in sconscripts:
                        decider = cuppa_env.get_option('decider')
                        if decider:
                            build_env['env'].Decider(decider)
                        self.call_project_sconscript_files(
                            toolchain, build_env['variant'],
                            build_env['target_arch'], build_env['abi'],
                            build_env['env'], sconscript)

            if cuppa_env['dump']:
                print(
                    "cuppa: Performing dump only, so no builds will be attempted."
                )
                print("cuppa: Nothing to be done. Exiting.")
                SCons.Script.Exit()

        else:
            logger.warn("No projects to build. Nothing to be done")
Example #28
    def __init__(self,
                 sconstruct_path,
                 base_path=os.path.abspath('.'),
                 branch_root=None,
                 default_options={},
                 default_projects=[],
                 default_variants=[],
                 default_dependencies=[],
                 default_profiles=[],
                 dependencies=[],
                 profiles=[],
                 default_runner=None,
                 configure_callback=None,
                 tools=[]):

        cuppa.core.base_options.set_base_options()

        cuppa_env = cuppa.core.environment.CuppaEnvironment()
        cuppa_env.add_tools(tools)

        dependencies, default_dependencies, dependencies_warning = self._normalise_with_defaults(
            dependencies, default_dependencies, "dependencies")
        profiles, default_profiles, profiles_warning = self._normalise_with_defaults(
            profiles, default_profiles, "profiles")

        self.initialise_options(cuppa_env, default_options, profiles,
                                dependencies)
        cuppa_env['configured_options'] = {}
        self._configure = cuppa.configure.Configure(
            cuppa_env, callback=configure_callback)

        enable_thirdparty_logging(
            cuppa_env.get_option('enable-thirdparty-logging') and True
            or False)
        self._set_verbosity_level(cuppa_env)

        cuppa_env['sconstruct_path'] = sconstruct_path
        cuppa_env['sconstruct_dir'], cuppa_env[
            'sconstruct_file'] = os.path.split(sconstruct_path)

        self._set_output_format(cuppa_env)

        self._configure.load()

        cuppa_env['offline'] = cuppa_env.get_option('offline')

        cuppa.version.check_current_version(cuppa_env['offline'])

        if cuppa_env['offline']:
            logger.info(as_info_label("Running in OFFLINE mode"))

        logger.info("using sconstruct file [{}]".format(
            as_notice(cuppa_env['sconstruct_file'])))

        if dependencies_warning:
            logger.warn(dependencies_warning)

        if profiles_warning:
            logger.warn(profiles_warning)

        help = cuppa_env.get_option('help') and True or False

        cuppa_env['minimal_output'] = cuppa_env.get_option('minimal_output')
        cuppa_env['ignore_duplicates'] = cuppa_env.get_option(
            'ignore_duplicates')

        cuppa_env['working_dir'] = os.getcwd()
        cuppa_env['launch_dir'] = os.path.relpath(SCons.Script.GetLaunchDir(),
                                                  cuppa_env['working_dir'])
        cuppa_env['run_from_launch_dir'] = cuppa_env['launch_dir'] == "."

        cuppa_env['launch_offset_dir'] = "."

        if not cuppa_env['run_from_launch_dir']:
            levels = len(cuppa_env['launch_dir'].split(os.path.sep))
            cuppa_env['launch_offset_dir'] = os.path.sep.join(
                ['..' for i in range(levels)])

        cuppa_env['base_path'] = os.path.normpath(
            os.path.expanduser(base_path))
        cuppa_env['branch_root'] = branch_root and os.path.normpath(
            os.path.expanduser(branch_root)) or base_path
        cuppa_env['branch_dir'] = cuppa_env['branch_root'] and os.path.relpath(
            cuppa_env['base_path'], cuppa_env['branch_root']) or None

        thirdparty = cuppa_env.get_option('thirdparty')
        if thirdparty:
            thirdparty = os.path.normpath(os.path.expanduser(thirdparty))

        cuppa_env['thirdparty'] = thirdparty

        cuppa.core.storage_options.process_storage_options(cuppa_env)
        cuppa.core.location_options.process_location_options(cuppa_env)

        cuppa_env['current_branch'] = ''
        cuppa_env['current_revision'] = ''
        if not help and not self._configure.handle_conf_only():
            if cuppa_env['location_match_current_branch']:
                url, repo, branch, remote, rev = cuppa.scms.scms.get_current_rev_info(
                    cuppa_env['sconstruct_dir'])
                if branch:
                    cuppa_env['current_branch'] = branch
                if rev:
                    cuppa_env['current_revision'] = rev
                logger.info(
                    "Current build on branch [{}] at revision [{}] from remote [{}] in [{}] at [{}]"
                    .format(as_info(str(branch)), as_info(str(rev)),
                            as_info(str(remote)), as_info(str(repo)),
                            as_info(str(url))))

        cuppa_env['default_projects'] = default_projects
        cuppa_env['default_variants'] = default_variants and set(
            default_variants) or set()
        cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
        cuppa_env['BUILD_WITH'] = cuppa_env['default_dependencies']
        cuppa_env['dependencies'] = {}
        cuppa_env[
            'default_profiles'] = default_profiles and default_profiles or []
        cuppa_env['BUILD_PROFILE'] = cuppa_env['default_profiles']
        cuppa_env['profiles'] = {}

        test_runner = cuppa_env.get_option(
            'runner', default=default_runner and default_runner or 'process')
        cuppa_env['default_runner'] = test_runner

        cuppa_env['propagate_env'] = cuppa_env.get_option(
            'propagate-env') and True or False
        cuppa_env['propagate_path'] = cuppa_env.get_option(
            'propagate-path') and True or False
        cuppa_env['merge_path'] = cuppa_env.get_option(
            'merge-path') and True or False
        cuppa_env['show_test_output'] = cuppa_env.get_option(
            'show-test-output') and True or False
        cuppa_env['suppress_process_output'] = cuppa_env.get_option(
            'suppress-process-output') and True or False
        cuppa_env['dump'] = cuppa_env.get_option('dump') and True or False
        cuppa_env['clean'] = cuppa_env.get_option('clean') and True or False

        self.add_variants(cuppa_env)
        self.add_toolchains(cuppa_env)
        self.add_platforms(cuppa_env)

        cuppa_env['platform'] = cuppa.build_platform.Platform.current()

        toolchains = cuppa_env.get_option('toolchains')
        cuppa_env['target_architectures'] = None

        if not help and not self._configure.handle_conf_only():
            default_toolchain = cuppa_env['platform'].default_toolchain()

            if not toolchains:
                toolchains = [
                    cuppa_env[self.toolchains_key][default_toolchain]
                ]
            else:
                toolchains = [
                    cuppa_env[self.toolchains_key][t] for t in toolchains
                ]

            cuppa_env['active_toolchains'] = toolchains

            def add_profile(name, profile):
                cuppa_env['profiles'][name] = profile

            def add_dependency(name, dependency):
                cuppa_env['dependencies'][name] = dependency

            cuppa.modules.registration.get_options("methods", cuppa_env)

            if not help and not self._configure.handle_conf_only():
                cuppa_env[self.project_generators_key] = {}
                cuppa.modules.registration.add_to_env("dependencies",
                                                      cuppa_env,
                                                      add_dependency)
                cuppa.modules.registration.add_to_env("profiles", cuppa_env,
                                                      add_profile)
                cuppa.modules.registration.add_to_env("methods", cuppa_env)
                cuppa.modules.registration.add_to_env("project_generators",
                                                      cuppa_env)

                for method_plugin in pkg_resources.iter_entry_points(
                        group='cuppa.method.plugins', name=None):
                    method_plugin.load().add_to_env(cuppa_env)

                for profile_plugin in pkg_resources.iter_entry_points(
                        group='cuppa.profile.plugins', name=None):
                    profile_plugin.load().add_to_env(cuppa_env)

                if profiles:
                    for profile in profiles:
                        profile.add_to_env(cuppa_env, add_profile)

                logger.trace("available profiles are [{}]".format(
                    colour_items(sorted(cuppa_env["profiles"].keys()))))

                logger.info("default profiles are [{}]".format(
                    colour_items(sorted(cuppa_env["default_profiles"]),
                                 as_info)))

                for dependency_plugin in pkg_resources.iter_entry_points(
                        group='cuppa.dependency.plugins', name=None):
                    dependency_plugin.load().add_to_env(
                        cuppa_env, add_dependency)

                if dependencies:
                    for dependency in dependencies:
                        dependency.add_to_env(cuppa_env, add_dependency)

                logger.trace("available dependencies are [{}]".format(
                    colour_items(sorted(cuppa_env["dependencies"].keys()))))

                logger.info("default dependencies are [{}]".format(
                    colour_items(sorted(cuppa_env["default_dependencies"]),
                                 as_info)))

            # TODO - default_profile

            if cuppa_env['dump']:
                logger.info(
                    as_info_label(
                        "Running in DUMP mode, no building will be attempted"))
                cuppa_env.dump()

            job_count = cuppa_env.get_option('num_jobs')
            parallel = cuppa_env.get_option('parallel')
            parallel_mode = "manually"

            if job_count == 1 and parallel:
                job_count = multiprocessing.cpu_count()
                if job_count > 1:
                    SCons.Script.SetOption('num_jobs', job_count)
                    parallel_mode = "automatically"
            cuppa_env['job_count'] = job_count
            cuppa_env['parallel'] = parallel
            if job_count > 1:
                logger.info(
                    "Running in {} with option [{}] set {} as [{}]".format(
                        as_emphasised("parallel mode"), as_info("jobs"),
                        as_emphasised(parallel_mode),
                        as_info(str(SCons.Script.GetOption('num_jobs')))))

        if not help and self._configure.handle_conf_only():
            self._configure.save()

        if not help and not self._configure.handle_conf_only():
            self.build(cuppa_env)

        if self._configure.handle_conf_only():
            print(
                "cuppa: Handling configuration only, so no builds will be attempted."
            )
            print(
                "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
            )
            print("")
            print("scons -D {}".format(
                self._command_line_from_settings(
                    cuppa_env['configured_options'])))
            print("")
            print("cuppa: Nothing to be done. Exiting.")
            SCons.Script.Exit()
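
The recurring "get_option(...) and True or False" construct seen throughout this initialiser is the old Python 2 idiom for coercing a possibly-None option value to a boolean; "x and True or False" evaluates to exactly bool(x). A sketch of the equivalent:

# equivalent, and clearer in modern Python:
cuppa_env['dump'] = bool(cuppa_env.get_option('dump'))
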
Example #29
File: git.py Project: iCodeIN/cuppa
    def get_branch(cls, path):
        branch = None
        remote = None

        head_detached = False
        command = "{git} branch".format(git=cls.binary())
        branch_info = cls.execute_command(command, path)
        if branch_info:
            match = re.search(r'^[*] [(]HEAD detached ', branch_info)
            if match:
                head_detached = True

        if not head_detached:
            result = cls.execute_command(
                "{git} status -sb".format(git=cls.binary()), path)
            if result:
                match = re.search(
                    r'## (?P<branch>[^)]+)[.][.][.](?P<remote>[^)\n]+)',
                    result)
                if match:
                    branch = match.group("branch")
                    remote = match.group("remote")
                match = re.search(r'## HEAD \(no branch\)', result)
                # Check if we are rebasing
                if match:
                    command = "{git} branch".format(git=cls.binary())
                    branch_info = cls.execute_command(command, path)
                    if branch_info:
                        match = re.search(
                            r'(no branch, rebasing (?P<branch>[^)]+))',
                            branch_info)
                        if match:
                            branch = match.group("branch")
                            logger.warn(
                                as_warning(
                                    "Currently rebasing branch [{}]".format(
                                        branch)))

            return branch, remote

        else:
            result = cls.execute_command(
                "{git} show -s --pretty=\%d --decorate=full HEAD".format(
                    git=cls.binary()), path)

            match = re.search(r'HEAD(?:(?:[^ ]* -> |[^,]*, )(?P<refs>[^)]+))?',
                              result)

            if match and match.group("refs"):
                refs = [{
                    "ref": r.strip(),
                    "type": ""
                } for r in match.group("refs").split(',')]
                logger.trace("Refs (using show) for [{}] are [{}]".format(
                    as_notice(path), colour_items((r["ref"] for r in refs))))
                if refs:
                    for ref in refs:
                        if ref["ref"].startswith("refs/heads/"):
                            ref["ref"] = ref["ref"][len("refs/heads/"):]
                            ref["type"] = "L"
                        elif ref["ref"].startswith("refs/tags/"):
                            ref["ref"] = ref["ref"][len("refs/tags/"):]
                            ref["type"] = "T"
                        elif ref["ref"].startswith("tag: refs/tags/"):
                            ref["ref"] = ref["ref"][len("tag: refs/tags/"):]
                            ref["type"] = "T"
                        elif ref["ref"].startswith("refs/remotes/"):
                            ref["ref"] = ref["ref"][len("refs/remotes/"):]
                            ref["type"] = "R"
                        else:
                            ref["type"] = "U"

                    logger.trace(
                        "Refs (after classification) for [{}] are [{}]".format(
                            as_notice(path),
                            colour_items((":".join([r["type"], r["ref"]])
                                          for r in refs))))

                    if refs[0]["type"] == "L":
                        branch = refs[0]["ref"]
                    #elif refs[0]["type"] == "T":
                    #branch = refs[0]["ref"]
                    elif refs[0]["type"] == "R":
                        branch = refs[0]["ref"].split('/')[1]

                    remote = next(
                        (ref["ref"] for ref in refs if ref["type"] == "R"),
                        None)

                logger.trace("Branch (using show) for [{}] is [{}]".format(
                    as_notice(path), as_info(str(branch))))
            else:
                if result == "(HEAD)":
                    command = "{git} branch".format(git=cls.binary())
                    branch_info = cls.execute_command(command, path)
                    if branch_info:
                        match = re.search(
                            r'(no branch, rebasing (?P<branch>[^)]+))',
                            branch_info)
                        if match:
                            branch = match.group("branch")
                            logger.warn(
                                as_warning(
                                    "Currently rebasing branch [{}]".format(
                                        branch)))
        #if not branch:
        #logger.warn( as_warning( "No branch found from [{}]".format( result ) ) )

        return branch, remote
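
The "git status -sb" header encodes both the local branch and its upstream, which the first regex above splits apart; an illustrative parse:

import re

result = "## develop...origin/develop"
match = re.search(r'## (?P<branch>[^)]+)[.][.][.](?P<remote>[^)\n]+)', result)
# match.group("branch") == "develop", match.group("remote") == "origin/develop"
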
Example #30
    def build_library_from_source(self,
                                  env,
                                  sources=None,
                                  library_name=None,
                                  linktype=None):

        from SCons.Script import Flatten

        if not self._source_path and not sources:
            logger.warn("Attempting to build library when source path is None")
            return None

        if not library_name:
            library_name = self._name

        if not linktype:
            linktype = self._linktype

        variant_key = env['tool_variant_dir']

        prebuilt_objects = self.lazy_create_node(variant_key,
                                                 self._prebuilt_objects)
        prebuilt_libraries = self.lazy_create_node(variant_key,
                                                   self._prebuilt_libraries)

        local_dir = self._location.local()
        local_folder = self._location.local_folder()

        build_dir = os.path.abspath(
            os.path.join(env['abs_build_root'], local_folder,
                         env['tool_variant_working_dir']))
        final_dir = os.path.abspath(
            os.path.normpath(os.path.join(build_dir, env['final_dir'])))

        logger.debug("build_dir for [{}] = [{}]".format(
            as_info(self._name), build_dir))
        logger.debug("final_dir for [{}] = [{}]".format(
            as_info(self._name), final_dir))

        obj_suffix = env['OBJSUFFIX']
        obj_builder = env.StaticObject
        lib_builder = env.BuildStaticLib

        if linktype == "shared":
            obj_suffix = env['SHOBJSUFFIX']
            obj_builder = env.SharedObject
            lib_builder = env.BuildSharedLib

        if not sources:
            sources = env.RecursiveGlob("*.cpp",
                                        start=self._source_path,
                                        exclude_dirs=[env['build_dir']])
            sources.extend(
                env.RecursiveGlob("*.cc",
                                  start=self._source_path,
                                  exclude_dirs=[env['build_dir']]))
            sources.extend(
                env.RecursiveGlob("*.c",
                                  start=self._source_path,
                                  exclude_dirs=[env['build_dir']]))

        objects = []
        for source in Flatten([sources]):
            rel_path = os.path.relpath(str(source), local_dir)
            rel_obj_path = os.path.splitext(rel_path)[0] + obj_suffix
            obj_path = os.path.join(build_dir, rel_obj_path)
            if not rel_obj_path in prebuilt_objects:
                prebuilt_objects[rel_obj_path] = obj_builder(obj_path, source)
            objects.append(prebuilt_objects[rel_obj_path])

        if not linktype in prebuilt_libraries:
            library = lib_builder(library_name, objects, final_dir=final_dir)
            if linktype == "shared":
                library = env.Install(env['abs_final_dir'], library)
            prebuilt_libraries[linktype] = library
        else:
            logger.trace("using existing library = [{}]".format(
                str(prebuilt_libraries[linktype])))

        return prebuilt_libraries[linktype]
Example #31
    def build( self, cuppa_env ):

#        cuppa.progress.NotifyProgress.register_callback( None, self.on_progress )

        cuppa_env['empty_env'] = cuppa_env.create_env()
        projects   = cuppa_env.get_option( 'projects' )
        toolchains = cuppa_env['active_toolchains']

        if not projects:
            projects = cuppa_env['default_projects']

            if not projects or not cuppa_env['run_from_launch_dir']:
                sub_sconscripts = self.get_sub_sconscripts(
                        cuppa_env['launch_dir'],
                        [ cuppa_env['build_root'], cuppa_env['download_root'] ]
                )
                if sub_sconscripts:
                    projects = sub_sconscripts
                    logger.info( "Using sub-sconscripts [{}]".format( colour_items( projects ) ) )
            elif projects:
                logger.info( "Using default_projects [{}]".format( colour_items( projects ) ) )

        if projects:

            sconscripts = []

            for project in projects:

                if(     not os.path.exists( project )
                    and not cuppa_env['run_from_launch_dir']
                    and not os.path.isabs( project ) ):

                    path = os.path.join( cuppa_env['launch_dir'], project )

                    if os.path.exists( path ):
                        if os.path.isdir( path ):
                            sub_sconscripts = self.get_sub_sconscripts(
                                project,
                                [ cuppa_env['build_root'], cuppa_env['download_root'] ]
                            )
                            if sub_sconscripts:
                                logger.info( "Reading project folder [{}] and using sub-sconscripts [{}]".format(
                                        project, colour_items( sub_sconscripts )
                                ) )
                                sconscripts.extend( sub_sconscripts )
                        else:
                            sconscripts.append( path )

                elif os.path.exists( project ) and os.path.isdir( project ):
                    sub_sconscripts = self.get_sub_sconscripts(
                            project,
                            [ cuppa_env['build_root'], cuppa_env['download_root'] ]
                    )
                    if sub_sconscripts:
                        logger.info( "Reading project folder [{}] and using sub-sconscripts [{}]".format(
                                project, colour_items( sub_sconscripts )
                        ) )
                        sconscripts.extend( sub_sconscripts )
                else:
                    sconscripts.append( project )

            for toolchain in toolchains:
                build_envs = self.create_build_envs( toolchain, cuppa_env )
                for build_env in build_envs:
                    for sconscript in sconscripts:
                        decider = cuppa_env.get_option( 'decider' )
                        if decider:
                            build_env['env'].Decider( decider )
                        self.call_project_sconscript_files( toolchain, build_env['variant'], build_env['target_arch'], build_env['env'], sconscript )

        else:
            logger.warn( "No projects to build. Nothing to be done" )
Example #32
    def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):

        offline = cuppa_env['offline']
        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs( base ):
            base = os.path.join( cuppa_env['working_dir'], base )

        if location.startswith( 'file:' ):
            location = pip_download.url_to_path( location )

        if not pip_download.is_url( location ):

            if pip_download.is_archive_file( location ):

                self._local_folder = self.folder_name_from_path( location, cuppa_env )
                local_directory = os.path.join( base, self._local_folder )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        os.rmdir( local_dir_with_sub_dir )
                    except OSError:
                        return local_directory

                self.extract( location, local_dir_with_sub_dir )
                logger.debug( "(local archive) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(local archive) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            else:
                local_directory = branch and os.path.join( location, branch ) or location
                self._local_folder = self.folder_name_from_path( location, cuppa_env )

                logger.debug( "(local file) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(local file) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            return local_directory
        else:

            self._local_folder = self.folder_name_from_path( full_url, cuppa_env )
            local_directory = os.path.join( base, self._local_folder )

            if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
                logger.debug( "[{}] is an archive download".format( as_info( location ) ) )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        # If not empty, this will fail
                        os.rmdir( local_dir_with_sub_dir )
                    except OSError:
                        # Not empty so we'll return this as the local_directory

                        logger.debug( "(already present) Location = [{}]".format( as_info( location ) ) )
                        logger.debug( "(already present) Local folder = [{}]".format( as_info( str(self._local_folder) ) ) )

                        return local_directory

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                # If not, we then check to see if we cached the download
                cached_archive = self.get_cached_archive( cuppa_env['cache_root'], self._local_folder )
                if cached_archive:
                    logger.debug( "Cached archive [{}] found for [{}]".format(
                            as_info( cached_archive ),
                            as_info( location )
                    ) )
                    self.extract( cached_archive, local_dir_with_sub_dir )
                else:
                    logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                    try:
                        report_hook = None
                        if logger.isEnabledFor( logging.INFO ):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                        name, extension = os.path.splitext( filename )
                        logger.info( "[{}] successfully downloaded to [{}]".format(
                                as_info( location ),
                                as_info( filename )
                        ) )
                        self.extract( filename, local_dir_with_sub_dir )
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join( cuppa_env['cache_root'], self._local_folder )
                            logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                            shutil.copyfile( filename, cached_archive )
                    except urllib.ContentTooShortError as error:
                        logger.error( "Download of [{}] failed with error [{}]".format(
                                as_error( location ),
                                as_error( str(error) )
                        ) )
                        raise LocationException( error )

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip_vcs.vcs.get_backend( vc_type )
                if backend:
                    vcs_backend = backend( self.expand_secret( location ) )
                    local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                    if cuppa_env['dump'] or cuppa_env['clean']:
                        return local_directory

                    if os.path.exists( local_directory ):
                        url, repository, branch, remote, revision = self.get_info( location, local_dir_with_sub_dir, full_url, vc_type )
                        rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
                        version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                        if not offline:
                            logger.info( "Updating [{}] in [{}]{} at [{}]".format(
                                    as_info( location ),
                                    as_notice( local_dir_with_sub_dir ),
                                    ( rev_options and  " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                                    as_info( version )
                            ) )
                            try:
                                update( vcs_backend, local_dir_with_sub_dir, rev_options )
                                logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                            except pip_exceptions.PipError as error:
                                logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                        as_warning( location ),
                                        as_warning( local_dir_with_sub_dir ),
                                        ( rev_options and  " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                        as_warning( str(error) )
                                ) )
                        else:
                            logger.debug( "Skipping update for [{}] as running in offline mode".format( as_info( location ) ) )
                    else:
                        rev_options = self.get_rev_options( vc_type, vcs_backend )
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        max_attempts = 2
                        attempt = 1
                        while attempt <= max_attempts:
                            logger.info( "{} [{}] into [{}]{}".format(
                                    action,
                                    as_info( location ),
                                    as_info( local_dir_with_sub_dir ),
                                    attempt > 1 and "(attempt {})".format( str(attempt) ) or ""
                            ) )
                            try:
                                vcs_backend.obtain( local_dir_with_sub_dir )
                                logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                                break
                            except pip_exceptions.PipError as error:
                                attempt = attempt + 1
                                log_as = logger.warn
                                if attempt > max_attempts:
                                    log_as = logger.error

                                log_as( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                        as_info( location ),
                                        as_notice( local_dir_with_sub_dir ),
                                        ( rev_options and  " to {}".format( as_notice(  str(rev_options) ) ) or ""),
                                        as_error( str(error) )
                                ) )
                                if attempt > max_attempts:
                                    raise LocationException( str(error) )

                logger.debug( "(url path) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(url path) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            return local_directory
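
A pattern worth noting in Example #32 is os.rmdir() used as an emptiness probe: it succeeds only on an empty directory, so an exception means a previous download and extraction left contents behind that can be reused. A minimal sketch of the idiom in isolation:

import os

def already_extracted( path ):
    if not os.path.exists( path ):
        return False
    try:
        os.rmdir( path )   # succeeds, and removes the directory, only if it is empty
        return False       # it was empty, so nothing useful was extracted there
    except OSError:
        return True        # non-empty, so reuse the existing extraction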
Example #33
    def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):

        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs( base ):
            base = os.path.join( cuppa_env['working_dir'], base )

        if location.startswith( 'file:' ):
            location = pip.download.url_to_path( location )

        if not pip.download.is_url( location ):

            if pip.download.is_archive_file( location ):

                local_folder = self.folder_name_from_path( location )
                local_directory = os.path.join( base, local_folder )

                if os.path.exists( local_directory ):
                    try:
                        os.rmdir( local_directory )
                    except OSError:
                        return local_directory, False

                self.extract( location, local_directory )
            else:
                local_directory = branch and os.path.join( location, branch ) or location
                return local_directory, False
        else:

            local_folder = self.folder_name_from_path( full_url )
            local_directory = os.path.join( base, local_folder )

            if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
                logger.debug( "[{}] is an archive download".format( as_info( location ) ) )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        # If not empty, this will fail
                        os.rmdir( local_dir_with_sub_dir )
                    except OSError:
                        # Not empty so we'll return this as the local_directory
                        return local_directory, True

                # If not, we then check to see if we cached the download
                cached_archive = self.get_cached_archive( cuppa_env['cache_root'], local_folder )
                if cached_archive:
                    logger.debug( "Cached archive [{}] found for [{}]".format(
                            as_info( cached_archive ),
                            as_info( location )
                    ) )
                    self.extract( cached_archive, local_dir_with_sub_dir )
                else:
                    logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                    try:
                        report_hook = None
                        if logger.isEnabledFor( logging.INFO ):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                        name, extension = os.path.splitext( filename )
                        logger.info( "[{}] successfully downloaded to [{}]".format(
                                as_info( location ),
                                as_info( filename )
                        ) )
                        self.extract( filename, local_dir_with_sub_dir )
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join( cuppa_env['cache_root'], local_folder )
                            logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                            shutil.copyfile( filename, cached_archive )
                    except urllib.ContentTooShortError as error:
                        logger.error( "Download of [{}] failed with error [{}]".format(
                                as_error( location ),
                                as_error( str(error) )
                        ) )
                        raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip.vcs.vcs.get_backend( vc_type )
                if backend:
                    vcs_backend = backend( location )
                    rev_options = self.get_rev_options( vc_type, vcs_backend )

                    local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

                    if os.path.exists( local_directory ):

                        url, repository, branch, revision = self.get_info( location, local_dir_with_sub_dir, full_url )
                        version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                        logger.debug( "Updating [{}] in [{}]{} at [{}]".format(
                                as_info( location ),
                                as_notice( local_dir_with_sub_dir ),
                                ( rev_options and  " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                                as_info( version )
                        ) )
                        try:
                            vcs_backend.update( local_dir_with_sub_dir, rev_options )
                            logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                        except pip.exceptions.InstallationError as error:
                            logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                    as_warning( location ),
                                    as_warning( local_dir_with_sub_dir ),
                                    ( rev_options and  " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                    as_warning( str(error) )
                            ) )
                    else:
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        logger.info( "{} [{}] into [{}]".format(
                                action, as_info( location ),
                                as_info( local_dir_with_sub_dir )
                        ) )
                        try:
                            vcs_backend.obtain( local_dir_with_sub_dir )
                            logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                        except pip.exceptions.InstallationError as error:
                            logger.error( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                    as_error( location ),
                                    as_error( local_dir_with_sub_dir ),
                                    ( rev_options and  " to {}".format( as_error(  str(rev_options) ) ) or ""),
                                    as_error( str( error ) )
                            ) )
                            raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

            return local_directory, True
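
Example #33 consults a download cache via get_cached_archive(), whose implementation is not shown. Since the download path caches the fetched file as <cache_root>/<local_folder>, a lookup plausibly reduces to an existence check; this is a sketch under that assumption only:

import os

def get_cached_archive( cache_root, local_folder ):
    # Assumed behaviour: the archive was cached verbatim under cache_root
    if not cache_root:
        return None
    candidate = os.path.join( cache_root, local_folder )
    return os.path.isfile( candidate ) and candidate or None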
Example #34
    def __init__( self,
                  sconstruct_path,
                  base_path            = os.path.abspath( '.' ),
                  branch_root          = None,
                  default_options      = {},
                  default_projects     = [],
                  default_variants     = [],
                  default_dependencies = [],
                  default_profiles     = [],
                  dependencies         = [],
                  profiles             = [],
                  default_runner       = None,
                  configure_callback   = None,
                  tools                = [] ):

        cuppa.core.base_options.set_base_options()

        cuppa_env = cuppa.core.environment.CuppaEnvironment()
        cuppa_env.add_tools( tools )

        dependencies, default_dependencies, dependencies_warning = self._normalise_with_defaults( dependencies, default_dependencies, "dependencies" )
        profiles, default_profiles, profiles_warning = self._normalise_with_defaults( profiles, default_profiles, "profiles" )

        self.initialise_options( cuppa_env, default_options, profiles, dependencies )
        cuppa_env['configured_options'] = {}
        self._configure = cuppa.configure.Configure( cuppa_env, callback=configure_callback )

        enable_thirdparty_logging( cuppa_env.get_option( 'enable-thirdparty-logging' ) and True or False )
        self._set_verbosity_level( cuppa_env )

        cuppa_env['sconstruct_path'] = sconstruct_path
        cuppa_env['sconstruct_dir'], cuppa_env['sconstruct_file'] = os.path.split(sconstruct_path)

        self._set_output_format( cuppa_env )

        self._configure.load()

        cuppa_env['offline'] = cuppa_env.get_option( 'offline' )

        cuppa.version.check_current_version( cuppa_env['offline'] )

        if cuppa_env['offline']:
            logger.info( as_info_label( "Running in OFFLINE mode" ) )

        logger.info( "using sconstruct file [{}]".format( as_notice( cuppa_env['sconstruct_file'] ) ) )

        if dependencies_warning:
            logger.warn( dependencies_warning )

        if profiles_warning:
            logger.warn( profiles_warning )

        help = cuppa_env.get_option( 'help' ) and True or False

        cuppa_env['minimal_output']       = cuppa_env.get_option( 'minimal_output' )
        cuppa_env['ignore_duplicates']    = cuppa_env.get_option( 'ignore_duplicates' )

        cuppa_env['working_dir']          = os.getcwd()
        cuppa_env['launch_dir']           = os.path.relpath( SCons.Script.GetLaunchDir(), cuppa_env['working_dir'] )
        cuppa_env['run_from_launch_dir']  = cuppa_env['launch_dir'] == "."

        cuppa_env['launch_offset_dir']    = "."

        if not cuppa_env['run_from_launch_dir']:
            levels = len( cuppa_env['launch_dir'].split( os.path.sep ) )
            cuppa_env['launch_offset_dir'] = os.path.sep.join( ['..' for i in range(levels)] )

        cuppa_env['base_path']   = os.path.normpath( os.path.expanduser( base_path ) )
        cuppa_env['branch_root'] = branch_root and os.path.normpath( os.path.expanduser( branch_root ) ) or base_path
        cuppa_env['branch_dir']  = cuppa_env['branch_root'] and os.path.relpath( cuppa_env['base_path'], cuppa_env['branch_root'] ) or None

        thirdparty = cuppa_env.get_option( 'thirdparty' )
        if thirdparty:
            thirdparty = os.path.normpath( os.path.expanduser( thirdparty ) )

        cuppa_env['thirdparty'] = thirdparty

        cuppa.core.storage_options.process_storage_options( cuppa_env )
        cuppa.core.location_options.process_location_options( cuppa_env )

        cuppa_env['default_projects']     = default_projects
        cuppa_env['default_variants']     = default_variants and set( default_variants ) or set()
        cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
        cuppa_env['BUILD_WITH']           = cuppa_env['default_dependencies']
        cuppa_env['dependencies']         = {}
        cuppa_env['default_profiles']     = default_profiles and default_profiles or []
        cuppa_env['BUILD_PROFILE']        = cuppa_env['default_profiles']
        cuppa_env['profiles']             = {}

        test_runner = cuppa_env.get_option( 'runner', default=default_runner and default_runner or 'process' )
        cuppa_env['default_runner']  = test_runner

        cuppa_env['propagate_env']       = cuppa_env.get_option( 'propagate-env' )       and True or False
        cuppa_env['propagate_path']      = cuppa_env.get_option( 'propagate-path' )      and True or False
        cuppa_env['merge_path']          = cuppa_env.get_option( 'merge-path' )          and True or False
        cuppa_env['show_test_output']    = cuppa_env.get_option( 'show-test-output' )    and True or False
        cuppa_env['suppress_process_output'] = cuppa_env.get_option( 'suppress-process-output' ) and True or False
        cuppa_env['dump']                = cuppa_env.get_option( 'dump' )                and True or False
        cuppa_env['clean']               = cuppa_env.get_option( 'clean' )               and True or False

        self.add_variants   ( cuppa_env )
        self.add_toolchains ( cuppa_env )
        self.add_platforms  ( cuppa_env )

        cuppa_env['platform'] = cuppa.build_platform.Platform.current()

        toolchains = cuppa_env.get_option( 'toolchains' )
        cuppa_env[ 'target_architectures' ] = None

        if not help and not self._configure.handle_conf_only():
            default_toolchain = cuppa_env['platform'].default_toolchain()

            if not toolchains:
                toolchains = [ cuppa_env[self.toolchains_key][default_toolchain] ]
            else:
                toolchains = [ cuppa_env[self.toolchains_key][t] for t in toolchains ]

            cuppa_env['active_toolchains'] = toolchains

            def add_profile( name, profile ):
                cuppa_env['profiles'][name] = profile

            def add_dependency( name, dependency ):
                cuppa_env['dependencies'][name] = dependency

            cuppa.modules.registration.get_options( "methods", cuppa_env )

            if not help and not self._configure.handle_conf_only():
                cuppa_env[self.project_generators_key] = {}
                cuppa.modules.registration.add_to_env( "dependencies",       cuppa_env, add_dependency )
                cuppa.modules.registration.add_to_env( "profiles",           cuppa_env, add_profile )
                cuppa.modules.registration.add_to_env( "methods",            cuppa_env )
                cuppa.modules.registration.add_to_env( "project_generators", cuppa_env )

                for method_plugin in pkg_resources.iter_entry_points( group='cuppa.method.plugins', name=None ):
                    method_plugin.load().add_to_env( cuppa_env )

                for profile_plugin in pkg_resources.iter_entry_points( group='cuppa.profile.plugins', name=None ):
                    profile_plugin.load().add_to_env( cuppa_env )

                if profiles:
                    for profile in profiles:
                        profile.add_to_env( cuppa_env, add_profile )

                logger.trace( "available profiles are [{}]".format(
                        colour_items( sorted( cuppa_env["profiles"].keys() ) )
                ) )

                logger.info( "default profiles are [{}]".format(
                        colour_items( sorted( cuppa_env["default_profiles"] ), as_info )
                ) )

                for dependency_plugin in pkg_resources.iter_entry_points( group='cuppa.dependency.plugins', name=None ):
                    dependency_plugin.load().add_to_env( cuppa_env, add_dependency )

                if dependencies:
                    for dependency in dependencies:
                        dependency.add_to_env( cuppa_env, add_dependency )


                logger.trace( "available dependencies are [{}]".format(
                        colour_items( sorted( cuppa_env["dependencies"].keys() ) )
                ) )

                logger.info( "default dependencies are [{}]".format(
                        colour_items( sorted( cuppa_env["default_dependencies"] ), as_info )
                ) )


            # TODO - default_profile

            if cuppa_env['dump']:
                logger.info( as_info_label( "Running in DUMP mode, no building will be attempted" ) )
                cuppa_env.dump()

            job_count = cuppa_env.get_option( 'num_jobs' )
            parallel  = cuppa_env.get_option( 'parallel' )
            parallel_mode = "manually"

            # A job count left at the default of 1 combined with --parallel means "use all cores"
            if job_count == 1 and parallel:
                job_count = multiprocessing.cpu_count()
                if job_count > 1:
                    SCons.Script.SetOption( 'num_jobs', job_count )
                    parallel_mode = "automatically"
            cuppa_env['job_count'] = job_count
            cuppa_env['parallel']  = parallel
            if job_count > 1:
                logger.debug( "Running in {} with option [{}] set {} as [{}]".format(
                        as_emphasised("parallel mode"),
                        as_info( "jobs" ),
                        as_emphasised(parallel_mode),
                        as_info( str( SCons.Script.GetOption( 'num_jobs') ) )
                ) )

        if not help and self._configure.handle_conf_only():
            self._configure.save()

        if not help and not self._configure.handle_conf_only():
            self.build( cuppa_env )

        if self._configure.handle_conf_only():
            print "cuppa: Handling configuration only, so no builds will be attempted."
            print "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
            print ""
            print "scons -D {}".format( self._command_line_from_settings( cuppa_env['configured_options'] ) )
            print ""
            print "cuppa: Nothing to be done. Exiting."
            SCons.Script.Exit()
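
The constructor in Example #34 ends by translating --parallel into a concrete SCons job count: a requested count of 1 combined with --parallel is taken to mean "use every available core". The same decision, extracted as a standalone sketch:

import multiprocessing

def effective_job_count( requested_jobs, parallel ):
    # Only auto-scale when the user left the job count at its default of 1
    if requested_jobs == 1 and parallel:
        detected = multiprocessing.cpu_count()
        if detected > 1:
            return detected, "automatically"
    return requested_jobs, "manually"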
Example #35
    def get_local_directory(self, cuppa_env, location, sub_dir, branch,
                            full_url):

        offline = cuppa_env['offline']
        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs(base):
            base = os.path.join(cuppa_env['working_dir'], base)

        if location.startswith('file:'):
            location = pip_download.url_to_path(location)

        if not pip_is_url(location):

            if pip_is_archive_file(location):

                self._local_folder = self.folder_name_from_path(
                    location, cuppa_env)
                local_directory = os.path.join(base, self._local_folder)

                local_dir_with_sub_dir = os.path.join(
                    local_directory, sub_dir and sub_dir or "")

                if os.path.exists(local_dir_with_sub_dir):
                    try:
                        os.rmdir(local_dir_with_sub_dir)
                    except OSError:
                        return local_directory

                self.extract(location, local_dir_with_sub_dir)
                logger.debug("(local archive) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(local archive) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            else:
                local_directory = branch and os.path.join(location,
                                                          branch) or location
                self._local_folder = self.folder_name_from_path(
                    location, cuppa_env)

                logger.debug("(local file) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(local file) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            return local_directory
        else:

            self._local_folder = self.folder_name_from_path(
                full_url, cuppa_env)
            local_directory = os.path.join(base, self._local_folder)

            if full_url.scheme.startswith(
                    'http') and self.url_is_download_archive_url(
                        full_url.path):
                logger.debug("[{}] is an archive download".format(
                    as_info(location)))

                local_dir_with_sub_dir = os.path.join(
                    local_directory, sub_dir and sub_dir or "")

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists(local_dir_with_sub_dir):
                    try:
                        # If not empty, this will fail
                        os.rmdir(local_dir_with_sub_dir)
                    except OSError:
                        # Not empty so we'll return this as the local_directory

                        logger.debug(
                            "(already present) Location = [{}]".format(
                                as_info(location)))
                        logger.debug(
                            "(already present) Local folder = [{}]".format(
                                as_info(str(self._local_folder))))

                        return local_directory

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                # If not, we then check to see if we cached the download
                cached_archive = self.get_cached_archive(
                    cuppa_env['cache_root'], self._local_folder)
                if cached_archive:
                    logger.debug("Cached archive [{}] found for [{}]".format(
                        as_info(cached_archive), as_info(location)))
                    self.extract(cached_archive, local_dir_with_sub_dir)
                else:
                    logger.info("Downloading [{}]...".format(
                        as_info(location)))
                    try:
                        report_hook = None
                        if logger.isEnabledFor(logging.INFO):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urlretrieve(location,
                                                        reporthook=report_hook)
                        name, extension = os.path.splitext(filename)
                        logger.info(
                            "[{}] successfully downloaded to [{}]".format(
                                as_info(location), as_info(filename)))
                        self.extract(filename, local_dir_with_sub_dir)
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join(
                                cuppa_env['cache_root'], self._local_folder)
                            logger.debug(
                                "Caching downloaded file as [{}]".format(
                                    as_info(cached_archive)))
                            shutil.copyfile(filename, cached_archive)
                    except ContentTooShortError as error:
                        logger.error(
                            "Download of [{}] failed with error [{}]".format(
                                as_error(location), as_error(str(error))))
                        raise LocationException(error)

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip_vcs.vcs.get_backend(vc_type)
                if backend:
                    try:
                        vcs_backend = backend(self.expand_secret(location))
                    except Exception:  # Pip version >= 19: backend is already an instance
                        backend.url = self.expand_secret(location)
                        vcs_backend = backend
                    local_dir_with_sub_dir = os.path.join(
                        local_directory, sub_dir and sub_dir or "")

                    if cuppa_env['dump'] or cuppa_env['clean']:
                        return local_directory

                    if os.path.exists(local_directory):
                        url, repository, branch, remote, revision = self.get_info(
                            location, local_dir_with_sub_dir, full_url,
                            vc_type)
                        rev_options = self.get_rev_options(vc_type,
                                                           vcs_backend,
                                                           local_remote=remote)
                        version = self.ver_rev_summary(branch, revision,
                                                       self._full_url.path)[0]
                        if not offline:
                            logger.info(
                                "Updating [{}] in [{}]{} at [{}]".format(
                                    as_info(location),
                                    as_notice(local_dir_with_sub_dir),
                                    (rev_options and " on {}".format(
                                        as_notice(str(rev_options))) or ""),
                                    as_info(version)))
                            try:
                                update(vcs_backend, local_dir_with_sub_dir,
                                       rev_options)
                                logger.debug(
                                    "Successfully updated [{}]".format(
                                        as_info(location)))
                            except pip_exceptions.PipError as error:
                                logger.warn(
                                    "Could not update [{}] in [{}]{} due to error [{}]"
                                    .format(as_warning(location),
                                            as_warning(local_dir_with_sub_dir),
                                            (rev_options and " at {}".format(
                                                as_warning(str(rev_options)))
                                             or ""), as_warning(str(error))))
                        else:
                            logger.debug(
                                "Skipping update for [{}] as running in offline mode"
                                .format(as_info(location)))
                    else:
                        rev_options = self.get_rev_options(
                            vc_type, vcs_backend)
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        max_attempts = 2
                        attempt = 1
                        while attempt <= max_attempts:
                            logger.info("{} [{}] into [{}]{}".format(
                                action, as_info(location),
                                as_info(local_dir_with_sub_dir), attempt > 1
                                and "(attempt {})".format(str(attempt)) or ""))
                            try:
                                obtain(vcs_backend, local_dir_with_sub_dir,
                                       vcs_backend.url)
                                logger.debug(
                                    "Successfully retrieved [{}]".format(
                                        as_info(location)))
                                break
                            except pip_exceptions.PipError as error:
                                attempt = attempt + 1
                                log_as = logger.warn
                                if attempt > max_attempts:
                                    log_as = logger.error

                                log_as(
                                    "Could not retrieve [{}] into [{}]{} due to error [{}]"
                                    .format(as_info(location),
                                            as_notice(local_dir_with_sub_dir),
                                            (rev_options and " to {}".format(
                                                as_notice(str(rev_options)))
                                             or ""), as_error(str(error))))
                                if attempt > max_attempts:
                                    raise LocationException(str(error))

                logger.debug("(url path) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(url path) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            return local_directory
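
Example #35 differs from Example #32 mainly in its compatibility shim around pip's VCS backends: from around pip 19, vcs.get_backend() returns a ready-made backend object rather than a class to be constructed with a URL. The shim, isolated as a sketch (the TypeError here is an assumed failure mode; the original catches everything):

def make_vcs_backend( backend, url ):
    try:
        return backend( url )   # older pip: backend is a class taking the URL
    except TypeError:           # assumed failure when the backend is not callable
        backend.url = url       # pip >= 19: backend is already an instance
        return backend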
Example #36
    def __init__( self, cuppa_env, location, develop=None, branch_path=None, extra_sub_path=None, name_hint=None ):

        logger.debug( "Create location using location=[{}], develop=[{}], branch_path=[{}], extra_sub_path=[{}], name_hint=[{}]".format(
                as_info( location ),
                as_info( str(develop) ),
                as_info( str(branch_path) ),
                as_info( str(extra_sub_path) ),
                as_info( str(name_hint) )
        ) )

        self._cuppa_env = cuppa_env
        self._supports_relative_versioning = False
        self._current_branch = self._cuppa_env['current_branch']
        self._current_revision = self._cuppa_env['current_revision']
        self._offline = self.option_set('offline')
        offline = self._offline
        self._default_branch = self._cuppa_env['location_default_branch']

        location = self.replace_sconstruct_anchor( location )

        if develop:
            if not os.path.isabs( develop ):
                develop = '#' + develop
            develop = self.replace_sconstruct_anchor( develop )
            logger.debug( "Develop location specified [{}]".format( as_info( develop ) ) )

        if self.option_set('develop') and develop:
            location = develop
            logger.debug( "--develop specified so using location=develop=[{}]".format( as_info( develop ) ) )

        scm_location = location

        # A trailing '@' on the location requests relative versioning against the current branch or revision
        if location[-1] == '@':
            self._supports_relative_versioning = True
            scm_location = location[:-1]

        scm_system, vc_type, repo_location, versioning = self.get_scm_system_and_info( self.expand_secret( scm_location ) )

        logger.debug( "Local location and actions for [{location}] being determined in context:{offline}"
                      " vc_type=[{vc_type}], repo_location=[{repo_location}],"
                      " versioning=[{versioning}]".format(
                location = as_info(location),
                offline  = self._offline and " " + as_info_label("OFFLINE") + "," or "",
                vc_type = as_info(str(vc_type)),
                repo_location = as_info(str(repo_location)),
                versioning = as_info(str(versioning))
        ) )

        if self._supports_relative_versioning:
            if self.location_match_current_branch():
                if not scm_system:
                    logger.warn( "Location [{}] specified using relative versioning, but no SCM system is available"
                                 " that matches the version control type [{}]. Relative versioning will be ignored"
                                 " for this location.".format( location, vc_type ) )
                else:
                    branch_exists = False
                    logger.debug( "Relative branching active for [{location}] with"
                                  " current branch [{branch}] and current revision [{revision}]".format(
                            location=as_info(str(location)),
                            branch=as_info(str(self._current_branch)),
                            revision=as_info(str(self._current_revision))
                    ) )

                    if self._current_branch:
                        # Try to check out the explicit branch but if that fails fall back
                        # to the default by stripping off the '@' from the end of the path
                        if not offline and scm_system.remote_branch_exists( repo_location, self._current_branch ):
                            scm_location = location + self._current_branch
                            logger.trace( "scm_location = [{scm_location}]".format(
                                    scm_location=as_info(str(scm_location))
                            ) )
                    elif self._current_revision:
                        # Try to check out the explicit branch but if that fails fall back
                        # to the default by stripping off the '@' from the end of the path
                        if not offline and scm_system.remote_branch_exists( repo_location, self._current_revision ):
                            scm_location = location + self._current_revision
                            logger.trace( "scm_location = [{scm_location}]".format(
                                    scm_location=as_info(str(scm_location))
                            ) )

            elif scm_system and not offline:
                self._default_branch = scm_system.remote_default_branch( repo_location )
                if self._default_branch:
                    scm_location = location + self._default_branch

        elif( scm_system
                and not versioning
                and not offline
                and self.option_set('location_explicit_default_branch')
        ):
            self._default_branch = scm_system.remote_default_branch( repo_location )
            if self._default_branch:
                scm_location = location + '@' + self._default_branch

        location = scm_location

        self._location   = os.path.expanduser( location )
        self._full_url   = urlparse( self._location )
        self._sub_dir    = None
        self._name_hint  = name_hint

        if extra_sub_path:
            if os.path.isabs( extra_sub_path ):
                raise LocationException( "Error extra sub path [{}] is not relative".format(extra_sub_path) )
            else:
                self._sub_dir = os.path.normpath( extra_sub_path )

        ## Get the location for the source dependency. If the location is a URL or an archive we'll need to
        ## retrieve the URL and extract the archive. get_local_directory() returns the location of the source
        ## once this is done.
        local_directory = self.get_local_directory( self._location, self._sub_dir, branch_path, self._full_url )

        logger.trace( "Local Directory for [{}] returned as [{}]".format(
                as_notice( self._location ),
                as_notice( local_directory )
        ) )

        self._base_local_directory = local_directory
        self._local_directory = self._sub_dir and os.path.join( local_directory, self._sub_dir ) or local_directory

        ## Now that we have a locally accessible version of the dependency we can try to collate some information
        ## about it to allow us to specify what we are building with.
        self._url, self._repository, self._branch, self._remote, self._revision = self.get_info( self._location, self._local_directory, self._full_url )
        self._version, self._revision = self.ver_rev_summary( self._branch, self._revision, self._full_url.path )

        logger.debug( "Using [{}]{}{} at [{}] stored in [{}]".format(
                as_info( location ),
                ( self._branch and ":[{}]".format( as_info( str(self._branch) ) ) or "" ),
                ( self._remote and " from [{}]".format( as_info( str(self._remote) ) ) or "" ),
                as_info( self._version ),
                as_notice( self._local_directory )
        ) )
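
Finally, Example #36 treats a trailing '@' on a location as a request for relative versioning: the branch or revision is resolved at build time against the current checkout rather than being pinned in the sconstruct. A minimal sketch of that convention:

def split_relative_versioning( location ):
    # "repo@" means "append the current branch or revision when one is available"
    if location.endswith( '@' ):
        return location[:-1], True
    return location, False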