Example #1
    def __call__( self, env, libraries ):
        if not self._add_dependents:
            print as_warning( env, "cuppa: boost: warning: BoostSharedLibrary() is deprecated, use BoostSharedLibs() or BoostSharedLib() instead" )
        libraries = Flatten( [ libraries ] )

        if not 'boost' in env['BUILD_WITH']:
            env.BuildWith( 'boost' )
        Boost = env['dependencies']['boost']

        for library in libraries:
            if library.startswith('log'):
                env.AppendUnique( CPPDEFINES = 'BOOST_LOG_DYN_LINK' )
            elif library == 'chrono':
                env.AppendUnique( CPPDEFINES = 'BOOST_CHRONO_DYN_LINK' )
            elif library == 'filesystem':
                env.AppendUnique( CPPDEFINES = 'BOOST_FILESYSTEM_DYN_LINK' )
            elif library == 'date_time':
                env.AppendUnique( CPPDEFINES = 'BOOST_DATE_TIME_DYN_LINK' )
            elif library == 'system':
                env.AppendUnique( CPPDEFINES = 'BOOST_SYSTEM_DYN_LINK' )

        library = BoostLibraryBuilder(
                Boost,
                add_dependents = self._add_dependents,
                verbose_build  = self._verbose_build,
                verbose_config = self._verbose_config )( env, None, None, libraries, 'shared' )
        if self._build_always:
            return AlwaysBuild( library )
        else:
            return library
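
A minimal usage sketch, assuming a cuppa sconscript `env` on which the builder methods registered in Example #11 are available (the library names are illustrative):

# Hypothetical sconscript usage: BoostSharedLibs() also pulls in dependent
# libraries, whereas the deprecated BoostSharedLibrary() above does not.
boost_libs = env.BoostSharedLibs( [ 'log', 'filesystem', 'system' ] )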
Example #2
def determine_latest_boost_verion( env ):
    current_release = "1.58.0"
    try:
        html = lxml.html.parse('http://www.boost.org/users/download/')

        current_release = html.xpath("/html/body/div[2]/div/div[1]/div/div/div[2]/h3[1]/span")[0].text
        current_release = str( re.search( r'(\d[.]\d+([.]\d+)?)', current_release ).group(1) )

        print "cuppa: boost: latest boost release detected as [{}]".format( as_info( env, current_release ) )

    except Exception as e:
        print as_warning( env, "cuppa: boost: warning: cannot determine latest version of boost - [{}]. Assuming [{}].".format( str(e), current_release ) )

    return current_release
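
The regular expression above does the actual extraction; a small self-contained illustration (the sample text is invented):

import re

sample = "Current Release: Version 1.58.0"   # invented page text
version = re.search( r'(\d[.]\d+([.]\d+)?)', sample ).group(1)
assert version == "1.58.0"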
Example #3
def info(path):
    if not path:
        raise SubversionException(
            "No working copy path specified for calling svnversion with.")

    url = None
    repository = None
    branch = None
    revision = None

    try:
        command = "svn info {}".format(path)
        svn_info = subprocess.check_output(shlex.split(command),
                                           stderr=subprocess.STDOUT)
        url = re.search(r'URL: ([^\s]+)', svn_info).expand(r'\1')
        repository = re.search(r'Repository Root: ([^\s]+)',
                               svn_info).expand(r'\1')
        branch = re.search(r'Relative URL: \^/([^\s]+)',
                           svn_info).expand(r'\1')
        revision = re.search(r'Revision: (\d+)', svn_info).expand(r'\1')
    except subprocess.CalledProcessError:
        raise SubversionException("Not a Subversion working copy")

    try:
        command = "svnversion -n {}".format(path)
        revision = subprocess.check_output(shlex.split(command),
                                           stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError:
        pass
    except OSError:
        logger.warn(
            "The {} binary is not available. Consider installing it.".format(
                as_warning("svnversion")))

    return url, repository, branch, revision
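
A hedged call-site sketch, assuming the path points at a Subversion working copy (otherwise SubversionException is raised):

# Hypothetical call site: unpack the tuple returned by info().
url, repository, branch, revision = info( "path/to/working/copy" )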
Example #4
    def __call__( self, env, libraries ):

        if not self._add_dependents:
            print as_warning( env, "cuppa: boost: warning: BoostStaticLibrary() is deprecated, use BoostStaticLibs() or BoostStaticLib() instead" )
        libraries = Flatten( [ libraries ] )

        if not 'boost' in env['BUILD_WITH']:
            env.BuildWith( 'boost' )
        Boost = env['dependencies']['boost']
        library = BoostLibraryBuilder(
                Boost,
                add_dependents = self._add_dependents,
                verbose_build  = self._verbose_build,
                verbose_config = self._verbose_config )( env, None, None, libraries, 'static' )
        if self._build_always:
            return AlwaysBuild( library )
        else:
            return library
Example #5
    def __init__( self, env ):

        url = "hg+https://bitbucket.org/dirkbaechle/scons_qt5"

        try:
            self._location = cuppa.location.Location( env, url, extra_sub_path = "qt5" )
        except cuppa.location.LocationException:
            print as_warning( env, "cuppa: qt5: warning: Could not retrieve url [{}]".format( url ) )
            raise Qt5Exception( "Could not retrieve scons_qt5 from [{}]".format( url ) )

        self._version = "5"

        if cuppa.build_platform.name() in ["Darwin", "Linux"]:
            if not cuppa.output_processor.command_available( "pkg-config" ):
                return
            if 'QT5DIR' not in env:
                self._set_qt5_dir( env )
            self._version = self._get_qt5_version()
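
cuppa.output_processor.command_available() guards the pkg-config queries above; a rough stand-in for such a check, assuming nothing about cuppa's actual implementation, might look like this:

import subprocess

def command_available( command ):
    # True if the binary can be launched at all; "--version" is an assumption.
    try:
        subprocess.call( [ command, "--version" ],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE )
        return True
    except OSError:
        return False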
Example #6
    def update_from_repository( self, location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend ):
        url, repository, branch, remote, revision = self.get_info( location, local_dir_with_sub_dir, full_url, vc_type )
        rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
        version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
        logger.info( "Updating [{}] in [{}]{} at [{}]".format(
                as_info( location ),
                as_notice( local_dir_with_sub_dir ),
                ( rev_options and " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                as_info( version )
        ) )
        try:
            update( vcs_backend, local_dir_with_sub_dir, rev_options )
            logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
        except pip_exceptions.PipError as error:
            logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                    as_warning( location ),
                    as_warning( local_dir_with_sub_dir ),
                    ( rev_options and " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                    as_warning( str(error) )
            ) )
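
The `rev_options and "..." or ""` construction used here (and in several later examples) is the pre-Python-2.5 spelling of a conditional expression; in isolation:

rev_options = None
suffix = rev_options and " on {}".format( str(rev_options) ) or ""
assert suffix == ""                       # falsy rev_options -> empty suffix

rev_options = [ "--revision", "1234" ]
suffix = rev_options and " on {}".format( str(rev_options) ) or ""
assert suffix == " on ['--revision', '1234']"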
Example #7
    def execute_command(cls, command, path=None):
        try:
            logger.trace("Executing command [{command}]...".format(
                command=as_info(command)))
            result = as_str(
                subprocess.check_output(shlex.split(command),
                                        stderr=subprocess.STDOUT,
                                        cwd=path)).strip()
            logger.trace("Result of calling [{command}] was [{result}]".format(
                command=as_info(command), result=as_notice(result)))
            return result
        except subprocess.CalledProcessError as error:
            logger.trace(
                "Command [{command}] failed with exit code [{exit_code}]".
                format(command=as_warning(str(command)),
                       exit_code=as_warning(str(error.returncode))))
            raise cls.Error(
                "Command [{command}] failed".format(command=str(command)))
        except OSError:
            logger.trace("Binary [{git}] is not available".format(
                git=as_warning(cls.binary())))
            raise cls.Error(
                "Binary [{git}] is not available".format(git=cls.binary()))
Example #8
def check_current_version():

    installed_version = get_version()
    logger.info("cuppa: version {}".format(as_info(installed_version)))
    try:
        pypi = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
        latest_available = pypi.package_releases('cuppa')[0]
        if parse_version(installed_version) < parse_version(latest_available):
            logger.warn(
                "Newer version [{}] available. Upgrade using \"{}\"\n".format(
                    as_warning(latest_available),
                    as_emphasised("pip install -U cuppa")))
    except:
        pass
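
The comparison relies on parse_version ordering release strings numerically rather than lexically (assuming the usual pkg_resources.parse_version is in scope):

from pkg_resources import parse_version

assert parse_version( "0.9.2" ) < parse_version( "0.10.0" )   # version order
assert not ( "0.9.2" < "0.10.0" )                             # plain string order differs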
Example #9
def check_current_version( offline ):

    installed_version = get_version()
    logger.info( "cuppa: version {}".format( as_info( installed_version ) ) )
    if not offline:
        try:
            pypi = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
            latest_available = pypi.package_releases('cuppa')[0]
            if parse_version( installed_version ) < parse_version( latest_available ):
                logger.warn( "Newer version [{}] available. Upgrade using \"{}\"\n".format(
                        as_warning( latest_available ),
                        as_emphasised( "pip install -U cuppa" )
                ) )
        except:
            pass
Example #10
    def get_local_directory(self, cuppa_env, location, sub_dir, branch,
                            full_url):

        offline = cuppa_env['offline']
        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs(base):
            base = os.path.join(cuppa_env['working_dir'], base)

        if location.startswith('file:'):
            location = pip_download.url_to_path(location)

        if not pip_is_url(location):

            if pip_is_archive_file(location):

                self._local_folder = self.folder_name_from_path(
                    location, cuppa_env)
                local_directory = os.path.join(base, self._local_folder)

                local_dir_with_sub_dir = os.path.join(
                    local_directory, sub_dir and sub_dir or "")

                if os.path.exists(local_dir_with_sub_dir):
                    try:
                        os.rmdir(local_dir_with_sub_dir)
                    except:
                        return local_directory

                self.extract(location, local_dir_with_sub_dir)
                logger.debug("(local archive) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(local archive) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            else:
                local_directory = branch and os.path.join(location,
                                                          branch) or location
                self._local_folder = self.folder_name_from_path(
                    location, cuppa_env)

                logger.debug("(local file) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(local file) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            return local_directory
        else:

            self._local_folder = self.folder_name_from_path(
                full_url, cuppa_env)
            local_directory = os.path.join(base, self._local_folder)

            if full_url.scheme.startswith(
                    'http') and self.url_is_download_archive_url(
                        full_url.path):
                logger.debug("[{}] is an archive download".format(
                    as_info(location)))

                local_dir_with_sub_dir = os.path.join(
                    local_directory, sub_dir and sub_dir or "")

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists(local_dir_with_sub_dir):
                    try:
                        # If not empty this will fail
                        os.rmdir(local_dir_with_sub_dir)
                    except:
                        # Not empty so we'll return this as the local_directory

                        logger.debug(
                            "(already present) Location = [{}]".format(
                                as_info(location)))
                        logger.debug(
                            "(already present) Local folder = [{}]".format(
                                as_info(str(self._local_folder))))

                        return local_directory

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                # If not we then check to see if we cached the download
                cached_archive = self.get_cached_archive(
                    cuppa_env['cache_root'], self._local_folder)
                if cached_archive:
                    logger.debug("Cached archive [{}] found for [{}]".format(
                        as_info(cached_archive), as_info(location)))
                    self.extract(cached_archive, local_dir_with_sub_dir)
                else:
                    logger.info("Downloading [{}]...".format(
                        as_info(location)))
                    try:
                        report_hook = None
                        if logger.isEnabledFor(logging.INFO):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urlretrieve(location,
                                                        reporthook=report_hook)
                        name, extension = os.path.splitext(filename)
                        logger.info(
                            "[{}] successfully downloaded to [{}]".format(
                                as_info(location), as_info(filename)))
                        self.extract(filename, local_dir_with_sub_dir)
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join(
                                cuppa_env['cache_root'], self._local_folder)
                            logger.debug(
                                "Caching downloaded file as [{}]".format(
                                    as_info(cached_archive)))
                            shutil.copyfile(filename, cached_archive)
                    except ContentTooShortError as error:
                        logger.error(
                            "Download of [{}] failed with error [{}]".format(
                                as_error(location), as_error(str(error))))
                        raise LocationException(error)

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip_vcs.vcs.get_backend(vc_type)
                if backend:
                    try:
                        vcs_backend = backend(self.expand_secret(location))
                    except:  # Pip version >= 19
                        backend.url = self.expand_secret(location)
                        vcs_backend = backend
                    local_dir_with_sub_dir = os.path.join(
                        local_directory, sub_dir and sub_dir or "")

                    if cuppa_env['dump'] or cuppa_env['clean']:
                        return local_directory

                    if os.path.exists(local_directory):
                        url, repository, branch, remote, revision = self.get_info(
                            location, local_dir_with_sub_dir, full_url,
                            vc_type)
                        rev_options = self.get_rev_options(vc_type,
                                                           vcs_backend,
                                                           local_remote=remote)
                        version = self.ver_rev_summary(branch, revision,
                                                       self._full_url.path)[0]
                        if not offline:
                            logger.info(
                                "Updating [{}] in [{}]{} at [{}]".format(
                                    as_info(location),
                                    as_notice(local_dir_with_sub_dir),
                                    (rev_options and " on {}".format(
                                        as_notice(str(rev_options))) or ""),
                                    as_info(version)))
                            try:
                                update(vcs_backend, local_dir_with_sub_dir,
                                       rev_options)
                                logger.debug(
                                    "Successfully updated [{}]".format(
                                        as_info(location)))
                            except pip_exceptions.PipError as error:
                                logger.warn(
                                    "Could not update [{}] in [{}]{} due to error [{}]"
                                    .format(as_warning(location),
                                            as_warning(local_dir_with_sub_dir),
                                            (rev_options and " at {}".format(
                                                as_warning(str(rev_options)))
                                             or ""), as_warning(str(error))))
                        else:
                            logger.debug(
                                "Skipping update for [{}] as running in offline mode"
                                .format(as_info(location)))
                    else:
                        rev_options = self.get_rev_options(
                            vc_type, vcs_backend)
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        max_attempts = 2
                        attempt = 1
                        while attempt <= max_attempts:
                            logger.info("{} [{}] into [{}]{}".format(
                                action, as_info(location),
                                as_info(local_dir_with_sub_dir), attempt > 1
                                and "(attempt {})".format(str(attempt)) or ""))
                            try:
                                obtain(vcs_backend, local_dir_with_sub_dir,
                                       vcs_backend.url)
                                logger.debug(
                                    "Successfully retrieved [{}]".format(
                                        as_info(location)))
                                break
                            except pip_exceptions.PipError as error:
                                attempt = attempt + 1
                                log_as = logger.warn
                                if attempt > max_attempts:
                                    log_as = logger.error

                                log_as(
                                    "Could not retrieve [{}] into [{}]{} due to error [{}]"
                                    .format(as_info(location),
                                            as_notice(local_dir_with_sub_dir),
                                            (rev_options and " to {}".format(
                                                as_notice(str(rev_options)))
                                             or ""), as_error(str(error))))
                                if attempt > max_attempts:
                                    raise LocationException(str(error))

                logger.debug("(url path) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(url path) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            return local_directory
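
The repeated os.rmdir() calls above are an "is this directory empty?" probe: removal only succeeds on an empty directory, so failure means a previous extraction is already in place. In isolation:

import os, tempfile

probe = tempfile.mkdtemp()
try:
    os.rmdir( probe )          # empty: removal succeeds
    already_present = False
except OSError:                # non-empty: rmdir refuses, keep the contents
    already_present = True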
Example #11
    def add_to_env( cls, env, add_dependency ):
        build_always     = env.get_option( 'boost-build-always' )
        verbose_build    = env.get_option( 'boost-verbose-build' )
        verbose_config   = env.get_option( 'boost-verbose-config' )
        patch_boost_test = env.get_option( 'boost-patch-boost-test' )

        boost_location = env.get_option( 'boost-location' )
        boost_home     = env.get_option( 'boost-home' )
        boost_version  = env.get_option( 'boost-version' )
        boost_latest   = env.get_option( 'boost-latest' )
        thirdparty     = env[ 'thirdparty' ]

        boost = None
        try:
            if boost_location:
                boost = cls( env, env[ 'platform' ],
                           location = boost_location,
                           version  = boost_version,
                           patch_test = patch_boost_test )
            elif boost_home:
                boost = cls( env, env[ 'platform' ],
                           base = boost_home,
                           patch_test = patch_boost_test )
            elif thirdparty and boost_version:
                boost = cls( env, env[ 'platform' ],
                           base = thirdparty,
                           version = boost_version,
                           patch_test = patch_boost_test )
            elif boost_version:
                boost = cls( env, env[ 'platform' ],
                           version = boost_version,
                           patch_test = patch_boost_test )
            elif boost_latest:
                boost = cls( env, env[ 'platform' ],
                           version = 'latest',
                           patch_test = patch_boost_test )

        except BoostException as e:
            print as_warning( env, "cuppa: boost: warning: Could not create boost dependency - {}".format(e) )

        add_dependency( 'boost', boost )

        env.AddMethod(
                BoostStaticLibraryMethod(
                        add_dependents=False,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config ),
                "BoostStaticLibrary" )
        env.AddMethod(
                BoostSharedLibraryMethod(
                        add_dependents=False,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config ),
                "BoostSharedLibrary" )
        env.AddMethod(
                BoostStaticLibraryMethod(
                        add_dependents=False,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config ),
                "BoostStaticLib" )
        env.AddMethod(
                BoostSharedLibraryMethod(
                        add_dependents=False,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config ),
                "BoostSharedLib" )
        env.AddMethod(
                BoostStaticLibraryMethod(
                        add_dependents=True,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config ),
                "BoostStaticLibs" )
        env.AddMethod(
                BoostSharedLibraryMethod(
                        add_dependents=True,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config ),
                "BoostSharedLibs" )
Example #12
    def add_to_env( cls, env, add_dependency ):
        try:
            add_dependency( cls._name, cls( env ) )
        except Qt5Exception:
            print as_warning( env, "cuppa: warning: Could not create dependency [{}]. Dependency not available.".format( cls._name ) )
Example #13
def info( path ):
    if not path:
        raise SubversionException("No working copy path specified for calling svnversion with.")

    url        = None
    repository = None
    branch     = None
    revision   = None

    try:
        command = "svn info {}".format( path )
        svn_info = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT )
        url        = re.search( r'URL: ([^\s]+)', svn_info ).expand(r'\1')
        repository = re.search( r'Repository Root: ([^\s]+)', svn_info ).expand(r'\1')
        branch     = re.search( r'Relative URL: \^/([^\s]+)', svn_info ).expand(r'\1')
        revision   = re.search( r'Revision: (\d+)', svn_info ).expand(r'\1')
    except subprocess.CalledProcessError:
        raise SubversionException("Not a Subversion working copy")

    try:
        command = "svnversion -n {}".format( path )
        revision = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT )
    except subprocess.CalledProcessError:
        pass
    except OSError:
        logger.warn( "The {} binary is not available. Consider installing it.".format( as_warning("svnversion") ) )

    return url, repository, branch, revision
Example #14
    def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):

        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs( base ):
            base = os.path.join( cuppa_env['working_dir'], base )

        if location.startswith( 'file:' ):
            location = pip.download.url_to_path( location )

        if not pip.download.is_url( location ):

            if pip.download.is_archive_file( location ):

                local_folder = self.folder_name_from_path( location )
                local_directory = os.path.join( base, local_folder )

                if os.path.exists( local_directory ):
                    try:
                        os.rmdir( local_directory )
                    except:
                        return local_directory, False

                self.extract( location, local_directory )
            else:
                local_directory = branch and os.path.join( location, branch ) or location
                return local_directory, False
        else:

            local_folder = self.folder_name_from_path( full_url )
            local_directory = os.path.join( base, local_folder )

            if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
                logger.debug( "[{}] is an archive download".format( as_info( location ) ) )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        # If not empty this will fail
                        os.rmdir( local_dir_with_sub_dir )
                    except:
                        # Not empty so we'll return this as the local_directory
                        return local_directory, True

                # If not we then check to see if we cached the download
                cached_archive = self.get_cached_archive( cuppa_env['cache_root'], local_folder )
                if cached_archive:
                    logger.debug( "Cached archive [{}] found for [{}]".format(
                            as_info( cached_archive ),
                            as_info( location )
                    ) )
                    self.extract( cached_archive, local_dir_with_sub_dir )
                else:
                    logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                    try:
                        report_hook = None
                        if logger.isEnabledFor( logging.INFO ):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                        name, extension = os.path.splitext( filename )
                        logger.info( "[{}] successfully downloaded to [{}]".format(
                                as_info( location ),
                                as_info( filename )
                        ) )
                        self.extract( filename, local_dir_with_sub_dir )
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join( cuppa_env['cache_root'], local_folder )
                            logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                            shutil.copyfile( filename, cached_archive )
                    except urllib.ContentTooShortError as error:
                        logger.error( "Download of [{}] failed with error [{}]".format(
                                as_error( location ),
                                as_error( str(error) )
                        ) )
                        raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip.vcs.vcs.get_backend( vc_type )
                if backend:
                    vcs_backend = backend( location )
                    rev_options = self.get_rev_options( vc_type, vcs_backend )

                    local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

                    if os.path.exists( local_directory ):

                        url, repository, branch, revision = self.get_info( location, local_dir_with_sub_dir, full_url )
                        version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                        logger.debug( "Updating [{}] in [{}]{} at [{}]".format(
                                as_info( location ),
                                as_notice( local_dir_with_sub_dir ),
                                ( rev_options and  " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                                as_info( version )
                        ) )
                        try:
                            vcs_backend.update( local_dir_with_sub_dir, rev_options )
                            logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                        except pip.exceptions.InstallationError as error:
                            logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                    as_warning( location ),
                                    as_warning( local_dir_with_sub_dir ),
                                    ( rev_options and  " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                    as_warning( str(error) )
                            ) )
                    else:
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        logger.info( "{} [{}] into [{}]".format(
                                action, as_info( location ),
                                as_info( local_dir_with_sub_dir )
                        ) )
                        try:
                            vcs_backend.obtain( local_dir_with_sub_dir )
                            logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                        except pip.exceptions.InstallationError as error:
                            logger.error( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                    as_error( location ),
                                    as_error( local_dir_with_sub_dir ),
                                    ( rev_options and  " to {}".format( as_error(  str(rev_options) ) ) or ""),
                                    as_error( str( error ) )
                            ) )
                            raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

            return local_directory, True
Example #15
    def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):

        offline = cuppa_env['offline']
        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs( base ):
            base = os.path.join( cuppa_env['working_dir'], base )

        if location.startswith( 'file:' ):
            location = pip_download.url_to_path( location )

        if not pip_download.is_url( location ):

            if pip_download.is_archive_file( location ):

                self._local_folder = self.folder_name_from_path( location, cuppa_env )
                local_directory = os.path.join( base, self._local_folder )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        os.rmdir( local_dir_with_sub_dir )
                    except:
                        return local_directory

                self.extract( location, local_dir_with_sub_dir )
                logger.debug( "(local archive) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(local archive) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            else:
                local_directory = branch and os.path.join( location, branch ) or location
                self._local_folder = self.folder_name_from_path( location, cuppa_env )

                logger.debug( "(local file) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(local file) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            return local_directory
        else:

            self._local_folder = self.folder_name_from_path( full_url, cuppa_env )
            local_directory = os.path.join( base, self._local_folder )

            if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
                logger.debug( "[{}] is an archive download".format( as_info( location ) ) )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        # If not empty this will fail
                        os.rmdir( local_dir_with_sub_dir )
                    except:
                        # Not empty so we'll return this as the local_directory

                        logger.debug( "(already present) Location = [{}]".format( as_info( location ) ) )
                        logger.debug( "(already present) Local folder = [{}]".format( as_info( str(self._local_folder) ) ) )

                        return local_directory

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                # If not we then check to see if we cached the download
                cached_archive = self.get_cached_archive( cuppa_env['cache_root'], self._local_folder )
                if cached_archive:
                    logger.debug( "Cached archive [{}] found for [{}]".format(
                            as_info( cached_archive ),
                            as_info( location )
                    ) )
                    self.extract( cached_archive, local_dir_with_sub_dir )
                else:
                    logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                    try:
                        report_hook = None
                        if logger.isEnabledFor( logging.INFO ):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                        name, extension = os.path.splitext( filename )
                        logger.info( "[{}] successfully downloaded to [{}]".format(
                                as_info( location ),
                                as_info( filename )
                        ) )
                        self.extract( filename, local_dir_with_sub_dir )
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join( cuppa_env['cache_root'], self._local_folder )
                            logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                            shutil.copyfile( filename, cached_archive )
                    except urllib.ContentTooShortError as error:
                        logger.error( "Download of [{}] failed with error [{}]".format(
                                as_error( location ),
                                as_error( str(error) )
                        ) )
                        raise LocationException( error )

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip_vcs.vcs.get_backend( vc_type )
                if backend:
                    vcs_backend = backend( self.expand_secret( location ) )
                    local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                    if cuppa_env['dump'] or cuppa_env['clean']:
                        return local_directory

                    if os.path.exists( local_directory ):
                        url, repository, branch, remote, revision = self.get_info( location, local_dir_with_sub_dir, full_url, vc_type )
                        rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
                        version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                        if not offline:
                            logger.info( "Updating [{}] in [{}]{} at [{}]".format(
                                    as_info( location ),
                                    as_notice( local_dir_with_sub_dir ),
                                    ( rev_options and  " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                                    as_info( version )
                            ) )
                            try:
                                update( vcs_backend, local_dir_with_sub_dir, rev_options )
                                logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                            except pip_exceptions.PipError as error:
                                logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                        as_warning( location ),
                                        as_warning( local_dir_with_sub_dir ),
                                        ( rev_options and  " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                        as_warning( str(error) )
                                ) )
                        else:
                            logger.debug( "Skipping update for [{}] as running in offline mode".format( as_info( location ) ) )
                    else:
                        rev_options = self.get_rev_options( vc_type, vcs_backend )
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        max_attempts = 2
                        attempt = 1
                        while attempt <= max_attempts:
                            logger.info( "{} [{}] into [{}]{}".format(
                                    action,
                                    as_info( location ),
                                    as_info( local_dir_with_sub_dir ),
                                    attempt > 1 and "(attempt {})".format( str(attempt) ) or ""
                            ) )
                            try:
                                vcs_backend.obtain( local_dir_with_sub_dir )
                                logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                                break
                            except pip_exceptions.PipError as error:
                                attempt = attempt + 1
                                log_as = logger.warn
                                if attempt > max_attempts:
                                    log_as = logger.error

                                log_as( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                        as_info( location ),
                                        as_notice( local_dir_with_sub_dir ),
                                        ( rev_options and  " to {}".format( as_notice(  str(rev_options) ) ) or ""),
                                        as_error( str(error) )
                                ) )
                                if attempt > max_attempts:
                                    raise LocationException( str(error) )

                logger.debug( "(url path) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(url path) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            return local_directory
Example #16
    def get_branch(cls, path):
        branch = None
        remote = None

        head_detached = False
        command = "{git} branch".format(git=cls.binary())
        branch_info = cls.execute_command(command, path)
        if branch_info:
            match = re.search(r'^[*] [(]HEAD detached ', branch_info)
            if match:
                head_detached = True

        if not head_detached:
            result = cls.execute_command(
                "{git} status -sb".format(git=cls.binary()), path)
            if result:
                match = re.search(
                    r'## (?P<branch>[^)]+)[.][.][.](?P<remote>[^)\n]+)',
                    result)
                if match:
                    branch = match.group("branch")
                    remote = match.group("remote")
                match = re.search(r'## HEAD \(no branch\)', result)
                # Check if we are rebasing
                if match:
                    command = "{git} branch".format(git=cls.binary())
                    branch_info = cls.execute_command(command, path)
                    if branch_info:
                        match = re.search(
                            r'(no branch, rebasing (?P<branch>[^)]+))',
                            branch_info)
                        if match:
                            branch = match.group("branch")
                            logger.warn(
                                as_warning(
                                    "Currently rebasing branch [{}]".format(
                                        branch)))

            return branch, remote

        else:
            result = cls.execute_command(
                "{git} show -s --pretty=\%d --decorate=full HEAD".format(
                    git=cls.binary()), path)

            match = re.search(r'HEAD(?:(?:[^ ]* -> |[^,]*, )(?P<refs>[^)]+))?',
                              result)

            if match and match.group("refs"):
                refs = [{
                    "ref": r.strip(),
                    "type": ""
                } for r in match.group("refs").split(',')]
                logger.trace("Refs (using show) for [{}] are [{}]".format(
                    as_notice(path), colour_items((r["ref"] for r in refs))))
                if refs:
                    for ref in refs:
                        if ref["ref"].startswith("refs/heads/"):
                            ref["ref"] = ref["ref"][len("refs/heads/"):]
                            ref["type"] = "L"
                        elif ref["ref"].startswith("refs/tags/"):
                            ref["ref"] = ref["ref"][len("refs/tags/"):]
                            ref["type"] = "T"
                        elif ref["ref"].startswith("tag: refs/tags/"):
                            ref["ref"] = ref["ref"][len("tag: refs/tags/"):]
                            ref["type"] = "T"
                        elif ref["ref"].startswith("refs/remotes/"):
                            ref["ref"] = ref["ref"][len("refs/remotes/"):]
                            ref["type"] = "R"
                        else:
                            ref["type"] = "U"

                    logger.trace(
                        "Refs (after classification) for [{}] are [{}]".format(
                            as_notice(path),
                            colour_items((":".join([r["type"], r["ref"]])
                                          for r in refs))))

                    if refs[0]["type"] == "L":
                        branch = refs[0]["ref"]
                    #elif refs[0]["type"] == "T":
                    #branch = refs[0]["ref"]
                    elif refs[0]["type"] == "R":
                        branch = refs[0]["ref"].split('/')[1]

                    remote = next(
                        (ref["ref"] for ref in refs if ref["type"] == "R"),
                        None)

                logger.trace("Branch (using show) for [{}] is [{}]".format(
                    as_notice(path), as_info(str(branch))))
            else:
                if result == "(HEAD)":
                    command = "{git} branch".format(git=cls.binary())
                    branch_info = cls.execute_command(command)
                    if branch_info:
                        match = re.search(
                            r'(no branch, rebasing (?P<branch>[^)]+))',
                            branch_info)
                        if match:
                            branch = match.group("branch")
                            logger.warn(
                                as_warning(
                                    "Currently rebasing branch [{}]".format(
                                        branch)))
        #if not branch:
        #logger.warn( as_warning( "No branch found from [{}]".format( result ) ) )

        return branch, remote
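
The ref classification in the detached-HEAD branch above amounts to prefix stripping plus a one-letter type tag; a reduced sketch of the same idea:

def classify_ref( ref ):
    # L = local branch, T = tag, R = remote-tracking ref, U = unclassified
    for prefix, ref_type in ( ( "refs/heads/", "L" ),
                              ( "tag: refs/tags/", "T" ),
                              ( "refs/tags/", "T" ),
                              ( "refs/remotes/", "R" ) ):
        if ref.startswith( prefix ):
            return ref[ len(prefix): ], ref_type
    return ref, "U"

assert classify_ref( "refs/heads/master" ) == ( "master", "L" )
assert classify_ref( "refs/remotes/origin/master" ) == ( "origin/master", "R" )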