Example #1
def lazy_create_path( path ):
    if not os.path.exists( path ):
        try:
            os.makedirs( path )
        except os.error as e:
            if not os.path.exists( path ):
                logger.error( "Could not create path [{}]. Failed with error [{}]".format( as_notice(path), as_error(str(e)) ) )
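
The helper above only reports an error if the directory is still missing after os.makedirs() fails, which tolerates another process creating the path at the same moment. A minimal standalone sketch of the same idiom, using only the standard library in place of cuppa's logger and as_notice/as_error colourisers, might look like this:

import logging
import os

logger = logging.getLogger(__name__)

def lazy_create_path(path):
    # Create the directory only if it is missing; losing a race to another
    # process that creates it first is not treated as an error.
    if not os.path.exists(path):
        try:
            os.makedirs(path)
        except OSError as e:
            if not os.path.exists(path):
                logger.error("Could not create path [%s]. Failed with error [%s]", path, e)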
Example #2
File: path.py Project: ja11sop/cuppa
def lazy_create_path( path ):
    if not os.path.exists( path ):
        try:
            os.makedirs( path )
        except os.error as e:
            if not os.path.exists( path ):
                logger.error( "Could not create path [{}]. Failed with error [{}]".format( as_notice(path), as_error(str(e)) ) )
Example #3
    def Popen2(cls, stdout_processor, stderr_processor, args_list, **kwargs):

        kwargs['stdout'] = subprocess.PIPE
        kwargs['stderr'] = subprocess.PIPE

        sys.stdout = AutoFlushFile(colorama.initialise.wrapped_stdout)
        sys.stderr = AutoFlushFile(colorama.initialise.wrapped_stderr)

        try:
            process = subprocess.Popen(args_list, **kwargs)

            stderr_consumer = LineConsumer(process.stderr.readline,
                                           stderr_processor)
            stdout_consumer = LineConsumer(process.stdout.readline,
                                           stdout_processor)

            stderr_thread = threading.Thread(target=stderr_consumer)
            stderr_thread.start()
            stdout_consumer()
            stderr_thread.join()

            process.wait()

            return process.returncode

        except Exception as e:
            logger.error(
                "output_processor: IncrementalSubProcess.Popen2() failed with error [{}]"
                .format(str(e)))
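
Popen2() drains stdout and stderr at the same time, stderr on a worker thread and stdout on the calling thread, so neither pipe buffer can fill up and stall the child process. A minimal sketch of that pattern using only the standard library (a plain callable stands in for cuppa's LineConsumer and processors) could be:

import subprocess
import threading

def consume(readline, processor):
    # Read the stream line by line until EOF and hand each line to the processor.
    for line in iter(readline, ''):
        processor(line.rstrip('\n'))

def run_and_process(args, on_stdout, on_stderr):
    process = subprocess.Popen(args, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, universal_newlines=True)
    # Drain stderr on a worker thread while stdout is drained on this thread.
    stderr_thread = threading.Thread(target=consume,
                                     args=(process.stderr.readline, on_stderr))
    stderr_thread.start()
    consume(process.stdout.readline, on_stdout)
    stderr_thread.join()
    return process.wait()

# Example usage: returncode = run_and_process(["echo", "hello"], print, print)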
Example #4
def apply_patch_if_needed(home, version_string):

    patch_applied_path = os.path.join(home, "cuppa_test_patch_applied.txt")

    expected_diff_file = os.path.join(
        os.path.split(__file__)[0],
        "boost_test_patch_{}.diff".format(version_string))

    available_diff_files = sorted(glob.glob(
        os.path.join(os.path.split(__file__)[0], "boost_test_patch_*.diff")),
                                  reverse=True)

    for diff_file in available_diff_files:
        if diff_file <= expected_diff_file:
            break

    logger.debug("Using diff file [{}]".format(as_info(diff_file)))

    if os.path.exists(patch_applied_path):
        logger.debug("[{}] already applied".format(as_info(diff_file)))
        return

    command = "patch --batch -p1 --input={}".format(diff_file)

    logger.info("Applying [{}] using [{}] in [{}]".format(
        as_info(diff_file), as_info(command), as_info(home)))

    if subprocess.call(shlex.split(command), cwd=home) != 0:
        logger.error("Could not apply [{}]".format(diff_file))
    else:
        with open(patch_applied_path, "w") as patch_applied_file:
            pass
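
apply_patch_if_needed() uses a marker file (cuppa_test_patch_applied.txt) so the patch is applied at most once: if the marker exists the patch is skipped, and the marker is only written after a successful patch run. The bare pattern, with illustrative names rather than cuppa's actual paths, can be sketched as:

import os
import shlex
import subprocess

def apply_once(home, diff_file, marker_name="patch_applied.txt"):
    # The marker file records that the patch has already been applied, so
    # re-running the build does not try to apply it a second time.
    marker = os.path.join(home, marker_name)
    if os.path.exists(marker):
        return True
    command = "patch --batch -p1 --input={}".format(diff_file)
    if subprocess.call(shlex.split(command), cwd=home) != 0:
        return False
    # Only record success once the patch applied cleanly.
    with open(marker, "w"):
        pass
    return True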
Example #5
def __call_classmethod_for_classes_in_module( package, name, path, method, *args, **kwargs ):
    try:
        filehandle, pathname, description = imp.find_module( name, path and [ path ] or None )
        try:
            try:
                qualified_name = package and package + "." + name or name
                module = sys.modules[ qualified_name ]

            except KeyError as e:
                module = imp.load_module( name, filehandle, pathname, description )

            for member_name in dir( module ):

                member = getattr( module, member_name )

                if inspect.ismodule( member ):
                    if package:
                        parent_package = package + "." + name
                    else:
                        parent_package = name
                    __call_classmethod_for_classes_in_module( parent_package, member_name, pathname, method, *args, **kwargs )

                elif inspect.isclass( member ):
                    try:
                        function = getattr( member, method )
                        if callable( function ):
                            try:
                                function( *args, **kwargs )
                            except Exception as error:
                                logger.error( "{} in {} failed with error [{}]".format( method, member, str(error) ) )
                    except AttributeError as e:
                        pass
        finally:
            if filehandle:
                filehandle.close()

    except ImportError as error:
        pass
Example #6
    def Popen2( cls, stdout_processor, stderr_processor, args_list, **kwargs ):

        kwargs['stdout'] = subprocess.PIPE
        kwargs['stderr'] = subprocess.PIPE

        sys.stdout = AutoFlushFile( colorama.initialise.wrapped_stdout )
        sys.stderr = AutoFlushFile( colorama.initialise.wrapped_stderr )

        try:
            process = subprocess.Popen(
                args_list,
                **kwargs
            )

            stderr_consumer = LineConsumer( process.stderr.readline, stderr_processor )
            stdout_consumer = LineConsumer( process.stdout.readline, stdout_processor )

            stderr_thread = threading.Thread( target=stderr_consumer )
            stderr_thread.start()
            stdout_consumer()
            stderr_thread.join()

            process.wait()

            return process.returncode

        except Exception as e:
            logger.error( "output_processor: IncrementalSubProcess.Popen2() failed with error [{}]".format( str(e) ) )
Example #7
 def create( cls, env ):
     try:
         if not cls._qt4_tool:
             cls._qt4_tool = cls.retrieve_tool( env )
         return build_with_qt4( env )
     except Qt4Exception:
         logger.error( "Could not create dependency [{}]. Dependency not available.".format( cls._name ) )
     return None
Example #8
File: stdcpp.py Project: ja11sop/cuppa
 def __call__( self, env, standard ):
     if standard not in self.stdcpp_choices:
         logger.error( "[{}] not in allowed list {}".format( as_error( standard ), as_notice( self.stdcpp_choices ) ) )
         return None
     env[ 'stdcpp' ] = standard
     toolchain = env['toolchain']
     flag = toolchain.stdcpp_flag_for( standard )
     env.ReplaceFlags( [ flag ] )
     return None
Example #9
    def get_local_directory_for_download_url(self, location, sub_dir,
                                             local_directory):

        logger.debug("[{}] is an archive download".format(as_info(location)))

        local_dir_with_sub_dir = os.path.join(local_directory,
                                              sub_dir and sub_dir or "")

        # First we check to see if we already downloaded and extracted this archive before
        if os.path.exists(local_dir_with_sub_dir):
            try:
                # If not empty this will fail
                os.rmdir(local_dir_with_sub_dir)
            except:
                # Not empty so we'll return this as the local_directory

                logger.debug("(already present) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(already present) Local folder = [{}]".format(
                    as_info(str(self._local_folder))))

                return local_directory

        if self._cuppa_env['dump'] or self._cuppa_env['clean']:
            return local_directory

        # If not we then check to see if we cached the download
        cached_archive = self.get_cached_archive(self._cuppa_env['cache_root'],
                                                 self._local_folder)
        if cached_archive:
            logger.debug("Cached archive [{}] found for [{}]".format(
                as_info(cached_archive), as_info(location)))
            self.extract(cached_archive, local_dir_with_sub_dir)
        else:
            logger.info("Downloading [{}]...".format(as_info(location)))
            try:
                report_hook = None
                if logger.isEnabledFor(logging.INFO):
                    report_hook = ReportDownloadProgress()
                filename, headers = urlretrieve(location,
                                                reporthook=report_hook)
                name, extension = os.path.splitext(filename)
                logger.info("[{}] successfully downloaded to [{}]".format(
                    as_info(location), as_info(filename)))
                self.extract(filename, local_dir_with_sub_dir)
                if self._cuppa_env['cache_root']:
                    cached_archive = os.path.join(
                        self._cuppa_env['cache_root'], self._local_folder)
                    logger.debug("Caching downloaded file as [{}]".format(
                        as_info(cached_archive)))
                    shutil.copyfile(filename, cached_archive)
            except ContentTooShortError as error:
                logger.error("Download of [{}] failed with error [{}]".format(
                    as_error(location), as_error(str(error))))
                raise LocationException(error)

        return local_directory
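
The os.rmdir() call in the method above is really an emptiness test: rmdir() only succeeds on an empty directory, so a failure means the archive was already extracted into that location. The idiom on its own, as a hedged sketch:

import os

def has_content(path):
    # os.rmdir() removes only empty directories, so failure implies the
    # directory exists and already contains something. Note the side effect:
    # an empty directory is deleted by this check, mirroring the code above.
    if not os.path.isdir(path):
        return False
    try:
        os.rmdir(path)
    except OSError:
        return True
    return False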
Example #10
 def _read(cls, json_report_path, default={}):
     with open(json_report_path, "r") as report:
         try:
             report = json.load(report)
             return report
         except ValueError as error:
             logger.error(
                 "Test Report [{}] does not contain valid JSON. Error [{}] encountered while parsing"
                 .format(as_info(json_report_path), as_error(str(error))))
     return default
Example #11
 def __call__(self, env, standard):
     if standard not in self.stdcpp_choices:
         logger.error("[{}] not in allowed list {}".format(
             as_error(standard), as_notice(self.stdcpp_choices)))
         return None
     env['stdcpp'] = standard
     toolchain = env['toolchain']
     flag = toolchain.stdcpp_flag_for(standard)
     env.ReplaceFlags([flag])
     return None
Example #12
 def retrieve_repo_info(cls, vcs_system, vcs_directory, expected_vc_type):
     if not expected_vc_type or expected_vc_type == vcs_system.vc_type():
         try:
             info = vcs_system.info(vcs_directory)
             return info
         except vcs_system.Error as ex:
             if expected_vc_type:
                 logger.error(
                     "Failed to retrieve info for [{}] because [{}]".format(
                         as_error(vcs_directory), as_error(str(ex))))
                 raise
             return None
Example #13
 def _read( cls, json_report_path, default={} ):
     with open( json_report_path, "r" ) as report:
         try:
             report = json.load( report )
             return report
         except ValueError as error:
             logger.error(
                 "Test Report [{}] does not contain valid JSON. Error [{}] encountered while parsing".format(
                 as_info( json_report_path ),
                 as_error( str(error) )
             ) )
     return default
Example #14
File: location.py Project: ja11sop/cuppa
 def retrieve_repo_info( cls, vcs_system, vcs_directory, expected_vc_type ):
     if not expected_vc_type or expected_vc_type == vcs_system.vc_type():
         try:
             info = vcs_system.info( vcs_directory )
             return info
         except vcs_system.Error as ex:
             if expected_vc_type:
                 logger.error( "Failed to retrieve info for [{}] because [{}]".format(
                         as_error( vcs_directory ),
                         as_error( str(ex) )
                 ) )
                 raise
             return None
Example #15
    def add_to_env( cls, env ):
        try:
            generate = env.get_option( 'generate-cbs' )
            if generate:
                obj = cls( env,
                           env.get_option( 'generate_cbs_include_thirdparty' ),
                           env.get_option( 'generate_cbs_exclude_relative_branches' ),
                           env.get_option( 'generate_cbs_exclude_paths_starting' ),
                           env.get_option( 'generate_cbs_place_with_sconscript' ) )

                env['project_generators']['codeblocks'] = obj

        except CodeblocksException as error:
            logger.error( "Failed to create CodeBlocks project generator with error [{}]".format( as_error(error) ) )
Example #16
def __call_classmethod_for_classes_in_module(package, name, path, method,
                                             *args, **kwargs):
    try:
        filehandle, pathname, description = imp.find_module(
            name, path and [path] or None)
        try:
            try:
                qualified_name = package and package + "." + name or name
                module = sys.modules[qualified_name]

            except KeyError as error:
                module = imp.load_module(name, filehandle, pathname,
                                         description)

            for member_name in dir(module):

                member = getattr(module, member_name)

                if inspect.ismodule(member):
                    if package:
                        parent_package = package + "." + name
                    else:
                        parent_package = name
                    __call_classmethod_for_classes_in_module(
                        parent_package, member_name, pathname, method, *args,
                        **kwargs)

                elif inspect.isclass(member):
                    try:
                        function = getattr(member, method)
                        if callable(function):
                            try:
                                function(*args, **kwargs)
                            except Exception as error:
                                if logger.isEnabledFor(logging.EXCEPTION):
                                    logger.error(
                                        "[{}] in [{}] failed with error [{}]".
                                        format(as_info(str(method)),
                                               as_notice(str(member)),
                                               as_info(str(error))))
                                    traceback.print_exc()
                                raise error
                    except AttributeError as ignore:
                        pass
        finally:
            if filehandle:
                filehandle.close()

    except ImportError as error:
        pass
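
Both variants of __call_classmethod_for_classes_in_module() are built on the imp module, which is deprecated and has been removed from recent Python 3 releases. The core idea (import a module, then call a named classmethod on every class it defines) can be sketched with importlib and inspect; this is only an illustration of the pattern, not a drop-in replacement for cuppa's recursive version:

import importlib
import inspect
import logging

logger = logging.getLogger(__name__)

def call_classmethod_for_classes_in_module(module_name, method, *args, **kwargs):
    # Import the module by its dotted name and invoke `method` on each class in it.
    module = importlib.import_module(module_name)
    for _, member in inspect.getmembers(module, inspect.isclass):
        function = getattr(member, method, None)
        if callable(function):
            try:
                function(*args, **kwargs)
            except Exception as error:
                logger.error("%s in %s failed with error [%s]", method, member, error)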
Example #17
 def create( cls, env ):
     if not cls._location:
         location = env.get_option( cls._name + "-location" )
         if not location:
             logger.error( "Dependency not available - no location specified" )
             return None
         try:
             cls._location = cuppa.location.Location( env, location )
         except cuppa.location.LocationException as error:
             logger.error(
                 "Dependency not available - retrieving location failed with error [{}]."
                 .format( str(error) )
             )
             return None
     return build_with_quince( env )
Example #18
 def create(cls, env):
     if not cls._location:
         location = env.get_option(cls._name + "-location")
         if not location:
             logger.error(
                 "Dependency not available - no location specified")
             return None
         try:
             cls._location = cuppa.location.Location(env, location)
         except cuppa.location.LocationException as error:
             logger.error(
                 "Dependency not available - retrieving location failed with error [{}]."
                 .format(str(error)))
             return None
     return build_with_quince(env)
Example #19
    def add_to_env( cls, env ):
        try:
            generate = env.get_option( 'generate-cbs' )
            if generate:
                obj = cls( env,
                           env.get_option( 'generate_cbs_include_thirdparty' ),
                           env.get_option( 'generate_cbs_exclude_relative_branches' ),
                           env.get_option( 'generate_cbs_exclude_paths_starting' ),
                           env.get_option( 'generate_cbs_place_with_sconscript' ),
                           env.get_option( 'generate_cbs_exclude_cc_search_paths' ),
                           env.get_option( 'generate_cbs_exclude_cc_sys_search_paths' ) )

                env['project_generators']['codeblocks'] = obj

        except CodeblocksException as error:
            logger.error( "Failed to create CodeBlocks project generator with error [{}]".format( as_error(error) ) )
Example #20
    def _get_location( cls, env ):
        location_id = cls.location_id( env )
        if not location_id:
            return None
        if location_id not in cls._cached_locations:
            location = location_id[0]
            branch = location_id[1]
            try:
                cls._cached_locations[location_id] = cuppa.location.Location( env, location, branch )
            except cuppa.location.LocationException as error:
                logger.error(
                        "Could not get location for [{}] at [{}] with branch [{}]. Failed with error [{}]"
                        .format( as_notice( cls._name.title() ), as_notice( str(location) ), as_notice( str(branch) ), as_error( error ) )
                )
                return None

        return cls._cached_locations[location_id]
Example #21
def process_storage_options( cuppa_env ):

        def get_normal_path( option, defaults_to ):
            path = cuppa_env.get_option( option, default=defaults_to )
            return os.path.normpath( os.path.expanduser( path ) )

        cuppa_env['build_root']     = get_normal_path( 'build_root', default.build_root )
        cuppa_env['abs_build_root'] = os.path.abspath( cuppa_env['build_root'] )
        cuppa_env['download_root']  = get_normal_path( 'download_root', default.download_root )
        cuppa_env['cache_root']     = get_normal_path( 'cache_root', default.cache_root )

        if not os.path.exists( cuppa_env['cache_root'] ):
            try:
                os.makedirs( cuppa_env['cache_root'] )
            except os.error as e:
                logger.error( "Creating cache_root directory [{}] failed with error: {}"
                             .format( cuppa_env['cache_root'], as_error(str(e)) ) )
                raise
Example #22
def process_storage_options(cuppa_env):
    def get_normal_path(option, defaults_to):
        path = cuppa_env.get_option(option, default=defaults_to)
        return os.path.normpath(os.path.expanduser(path))

    cuppa_env['build_root'] = get_normal_path('build_root', default.build_root)
    cuppa_env['abs_build_root'] = os.path.abspath(cuppa_env['build_root'])
    cuppa_env['download_root'] = get_normal_path('download_root',
                                                 default.download_root)
    cuppa_env['cache_root'] = get_normal_path('cache_root', default.cache_root)

    if not os.path.exists(cuppa_env['cache_root']):
        try:
            os.makedirs(cuppa_env['cache_root'])
        except os.error as e:
            logger.error(
                "Creating cache_root directory [{}] failed with error: {}".
                format(cuppa_env['cache_root'], as_error(str(e))))
            raise
Example #23
File: __init__.py Project: kjing/cuppa
def run(*args, **kwargs):
    from inspect import getframeinfo, stack
    caller = getframeinfo(stack()[1][0])
    sconsctruct_path = caller.filename
    import traceback
    from cuppa.log import logger, initialise_logging
    from cuppa.colourise import as_info
    import logging
    initialise_logging()
    try:
        import cuppa.core
        cuppa.core.run(sconsctruct_path, *args, **kwargs)
    except Exception as error:
        logger.error("Cuppa terminated by exception [{}: {}]".format(
            as_info(error.__class__.__name__), as_info(str(error))))
        if not logger.isEnabledFor(logging.EXCEPTION):
            logger.error("Use {} (or above) to see the stack".format(
                as_info("--verbosity=exception")))
        logger.exception(traceback.format_exc())
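
The check against logging.EXCEPTION relies on a level that does not exist in the standard logging module, so cuppa presumably registers it as a custom level during initialise_logging(). A minimal sketch of defining such a level with plain logging (the name binding and numeric value here are assumptions for illustration, not cuppa's actual definitions):

import logging

# Register a custom "EXCEPTION" level below DEBUG; the value 5 is only an
# example and is not taken from cuppa itself.
EXCEPTION = 5
logging.addLevelName(EXCEPTION, "EXCEPTION")
logging.EXCEPTION = EXCEPTION

logging.basicConfig(level=EXCEPTION)
logger = logging.getLogger("example")

# Guard expensive work, such as formatting a stack trace, behind the level check.
if logger.isEnabledFor(logging.EXCEPTION):
    logger.log(logging.EXCEPTION, "stack traces would be shown at this verbosity")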
Example #24
    def __call__(self, option, opt, value, parser):
        toolchains = set()
        requested = value.split(',')
        for toolchain in requested:
            supported = fnmatch.filter( self._supported, toolchain )

            if not supported:
                logger.warn( "Requested toolchain [{}] does not match any supported, skipping".format( as_info(toolchain) ) )
            else:
                available = fnmatch.filter( self._available, toolchain )

                if not available:
                    logger.warn( "Requested toolchain [{}] does not match any available, skipping".format( as_info(toolchain) ) )
                else:
                    toolchains.update( available )

        if not toolchains:
            logger.error( "None of the requested toolchains are available" )

        parser.values.toolchains = list(toolchains)
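
The option callback expands each comma-separated, wildcard-style toolchain name against the supported and available sets with fnmatch.filter(). That matching step in isolation (the toolchain names below are made up for illustration):

import fnmatch

def match_toolchains(requested, available):
    # Expand each requested pattern, e.g. "gcc*", against the available
    # toolchain names and collect every match without duplicates.
    matched = set()
    for pattern in requested.split(','):
        matched.update(fnmatch.filter(available, pattern))
    return sorted(matched)

# Example usage:
# match_toolchains("gcc*,clang39", ["gcc49", "gcc5", "clang39", "vc140"])
# returns ['clang39', 'gcc49', 'gcc5']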
Example #25
    def get_flags(cls, location):

        flags = {}
        flags['INCPATH'] = [os.path.join(location.local(), "include")]

        pg_config = "pg_config"
        if platform.system() == "Windows":
            pg_config = pg_config + ".exe"
            if not cuppa.output_processor.command_available(pg_config):
                # try to find the Postgresql install
                program_files = os.environ.get("ProgramW6432")
                postgresql_base = os.path.join(program_files, "PostgreSQL")
                if os.path.exists(postgresql_base):
                    paths = glob.glob(postgresql_base + '\\*')
                    if len(paths):
                        paths.sort()
                        latest = paths[-1]
                        pg_config = '\"' + os.path.join(
                            latest, "bin", pg_config) + '\"'

        if cuppa.output_processor.command_available(pg_config):
            command = "{pg_config} --includedir".format(pg_config=pg_config)
            libpq_include = as_str(
                subprocess.check_output(shlex.split(command),
                                        stderr=subprocess.STDOUT).strip())
            flags['INCPATH'].append(libpq_include)

            command = "{pg_config} --libdir".format(pg_config=pg_config)
            libpq_libpath = as_str(
                subprocess.check_output(shlex.split(command),
                                        stderr=subprocess.STDOUT).strip())
            flags['LIBPATH'] = [libpq_libpath]
        else:
            logger.error(
                "postgresql: pg_config not available so cannot determine LIBPATH for postgres libraries"
            )
            raise QuinceException("pg_config not available")

        flags['DYNAMICLIBS'] = ['pq']

        return flags
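
get_flags() shells out to pg_config to discover where the libpq headers and libraries live. The querying step on its own can be sketched as follows, assuming pg_config is on the PATH and reducing error handling to returning None:

import shlex
import subprocess

def pg_config_value(option, pg_config="pg_config"):
    # Ask pg_config for a single value, e.g. "--includedir" or "--libdir".
    command = "{} {}".format(pg_config, option)
    try:
        output = subprocess.check_output(shlex.split(command),
                                         stderr=subprocess.STDOUT)
    except (OSError, subprocess.CalledProcessError):
        return None
    return output.decode().strip()

# Example usage:
# include_dir = pg_config_value("--includedir")
# lib_dir = pg_config_value("--libdir")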
Example #26
def apply_patch_if_needed(home):

    patch_applied_path = os.path.join(home, "cuppa_test_patch_applied.txt")
    diff_file = "boost_test_patch.diff"

    if os.path.exists(patch_applied_path):
        logger.debug("[{}] already applied".format(as_info(diff_file)))
        return

    diff_path = os.path.join(os.path.split(__file__)[0], "boost", diff_file)

    command = "patch --batch -p1 --input={}".format(diff_path)

    logger.info("Applying [{}] using [{}] in [{}]".format(
        as_info(diff_file), as_info(command), as_info(home)))

    if subprocess.call(shlex.split(command), cwd=home) != 0:
        logger.error("Could not apply [{}]".format(diff_file))

    with open(patch_applied_path, "w") as patch_applied_file:
        pass
Example #27
File: location.py Project: iCodeIN/cuppa
    def retrieve_repo_info( cls, vcs_system, vcs_directory, expected_vc_type ):
        if not expected_vc_type or expected_vc_type == vcs_system.vc_type():
            try:
                logger.trace( "expected_vc_type=[{expected_vc_type}], vcs_system=[{vc_type}], vcs_directory=[{directory}]".format(
                        expected_vc_type=as_info( str(expected_vc_type) ),
                        vc_type=as_info( vcs_system and vcs_system.vc_type() or "None" ),
                        directory=as_notice( str(vcs_directory) )
                ) )

                info = vcs_system.info( vcs_directory )

                logger.trace( "vcs_info=[{vcs_info}]".format( vcs_info=as_info(str(info)) ) )

                return info
            except vcs_system.Error as ex:
                if expected_vc_type:
                    logger.error( "Failed to retrieve info for [{}] because [{}]".format(
                            as_error( vcs_directory ),
                            as_error( str(ex) )
                    ) )
                    raise
                return None
Example #28
    def _get_location(cls, env):

        import SCons.Errors

        location_id = cls.location_id(env)
        if not location_id:
            return None
        if location_id not in cls._cached_locations:
            location = location_id[0]
            develop = location_id[1]
            branch = location_id[2]
            use_develop = location_id[3]
            try:
                cls._cached_locations[location_id] = cuppa.location.Location(
                    env,
                    location,
                    develop=develop,
                    branch=branch,
                    extra_sub_path=cls._extra_sub_path)
                logger.debug(
                    "Adding location [{}]({}) to cached locations".format(
                        as_notice(cls._name.title()),
                        as_notice(str(location_id))))
            except cuppa.location.LocationException as error:
                logger.error(
                    "Could not get location for [{}] at [{}] (and develop [{}], use=[{}]) with branch [{}] and extra sub path [{}]. Failed with error [{}]"
                    .format(as_notice(cls._name.title()),
                            as_info(str(location)), as_info(str(develop)),
                            as_notice(str(use_develop and True or False)),
                            as_notice(str(branch)),
                            as_notice(str(cls._extra_sub_path)),
                            as_error(str(error))))
                raise SCons.Errors.StopError(error)
        else:
            logger.debug(
                "Loading location [{}]({}) from cached locations".format(
                    as_notice(cls._name.title()), as_notice(str(location_id))))

        return cls._cached_locations[location_id]
Example #29
    def _get_location( cls, env ):

        import SCons.Errors

        location_id = cls.location_id( env )
        if not location_id:
            return None
        if location_id not in cls._cached_locations:
            location = location_id[0]
            develop = location_id[1]
            branch = location_id[2]
            use_develop = location_id[3]
            try:
                cls._cached_locations[location_id] = cuppa.location.Location( env, location, develop=develop, branch=branch, extra_sub_path=cls._extra_sub_path )
                logger.debug( "Adding location [{}]({}) to cached locations".format(
                        as_notice( cls._name.title() ),
                        as_notice( str(location_id) )
                ) )
            except cuppa.location.LocationException as error:
                logger.error(
                        "Could not get location for [{}] at [{}] (and develop [{}], use=[{}]) with branch [{}] and extra sub path [{}]. Failed with error [{}]"
                        .format(
                                as_notice( cls._name.title() ),
                                as_info( str(location) ),
                                as_info( str(develop) ),
                                as_notice( str(use_develop and True or False) ),
                                as_notice( str(branch) ),
                                as_notice( str(cls._extra_sub_path) ),
                                as_error( str(error) )
                        )
                )
                raise SCons.Errors.StopError( error )
        else:
            logger.debug( "Loading location [{}]({}) from cached locations".format(
                    as_notice( cls._name.title() ),
                    as_notice( str(location_id) )
            ) )

        return cls._cached_locations[location_id]
Example #30
    def __init__( self, env ):

        self._version = "4"

        if cuppa.build_platform.name() in ["Darwin", "Linux"]:
            if cuppa.output_processor.command_available( "pkg-config" ):
                if 'QT4DIR' not in env:
                    self._set_qt4_dir( env )
                self._version = self._get_qt4_version()

        elif cuppa.build_platform.name() == "Windows":
            if 'QT4DIR' not in env:
                paths = glob.glob( 'C:\\Qt\\4.*\\*' )
                if len(paths):
                    paths.sort()
                    env['QT4DIR'] = paths[-1]

        if 'QT4DIR' not in env:
            logger.error( "could not detect QT4 installation" )
            raise Qt4Exception( "could not detect QT4 installation." )

        logger.debug( "QT4DIR detected as [{}]".format( as_info( env['QT4DIR'] ) ) )
Example #31
    def _get_location(cls, env):
        location_id = cls.location_id(env)
        if not location_id:
            return None
        if location_id not in cls._cached_locations:
            location = location_id[0]
            branch = location_id[1]
            try:
                cls._cached_locations[location_id] = cuppa.location.Location(
                    env,
                    location,
                    branch=branch,
                    extra_sub_path=cls._extra_sub_path)
            except cuppa.location.LocationException as error:
                logger.error(
                    "Could not get location for [{}] at [{}] with branch [{}] and extra sub path [{}]. Failed with error [{}]"
                    .format(as_notice(cls._name.title()),
                            as_notice(str(location)), as_notice(str(branch)),
                            as_notice(str(cls._extra_sub_path)),
                            as_error(error)))
                return None

        return cls._cached_locations[location_id]
Example #32
def apply_patch_if_needed( home, version_string ):

    patch_applied_path = os.path.join( home, "cuppa_test_patch_applied.txt" )

    expected_diff_file = os.path.join(
            os.path.split( __file__ )[0],
            "boost_test_patch_{}.diff".format( version_string )
    )

    available_diff_files = sorted( glob.glob( os.path.join(
            os.path.split( __file__ )[0],
            "boost_test_patch_*.diff"
    ) ), reverse=True )

    for diff_file in available_diff_files:
        if diff_file <= expected_diff_file:
            break

    logger.debug( "Using diff file [{}]".format( as_info( diff_file ) ) )

    if os.path.exists( patch_applied_path ):
        logger.debug( "[{}] already applied".format( as_info( diff_file ) ) )
        return

    command = "patch --batch -p1 --input={}".format( diff_file )

    logger.info( "Applying [{}] using [{}] in [{}]".format(
            as_info( diff_file ),
            as_info( command ),
            as_info( home )
    ) )

    if subprocess.call( shlex.split( command ), cwd=home ) != 0:
        logger.error( "Could not apply [{}]".format( diff_file ) )
    else:
        with open( patch_applied_path, "w" ) as patch_applied_file:
            pass
Example #33
    def apply_patch_if_needed( cls, home ):

        patch_applied_path = os.path.join( home, "cuppa_test_patch_applied.txt" )
        diff_file = "boost_test_patch.diff"

        if os.path.exists( patch_applied_path ):
            logger.debug( "[{}] already applied".format( as_info( diff_file ) ) )
            return

        diff_path = os.path.join( os.path.split( __file__ )[0], "boost", diff_file )

        command = "patch --batch -p1 --input={}".format( diff_path )

        logger.info( "Applying [{}] using [{}] in [{}]".format(
                as_info( diff_file ),
                as_info( command ),
                as_info( home )
        ) )

        if subprocess.call( shlex.split( command ), cwd=home ) != 0:
            logger.error( "Could not apply [{}]".format( diff_file ) )

        with open( patch_applied_path, "w" ) as patch_applied_file:
            pass
Example #34
    def call_project_sconscript_files(self, toolchain, variant, target_arch,
                                      abi, sconscript_env, project):

        sconscript_file = project

        if os.path.exists(sconscript_file) and os.path.isfile(sconscript_file):

            logger.debug(
                "project exists and added to build [{}] using [{},{},{}]".
                format(as_notice(sconscript_file), as_notice(toolchain.name()),
                       as_notice(variant), as_notice(target_arch)))

            path_without_ext = os.path.splitext(sconscript_file)[0]

            sconstruct_offset_path, sconscript_name = os.path.split(
                sconscript_file)

            name = os.path.splitext(sconscript_name)[0]
            sconscript_env['sconscript_name_id'] = name
            if name.lower() == "sconscript":
                sconscript_env['sconscript_name_id'] = ""
                path_without_ext = sconstruct_offset_path
                name = path_without_ext

            sconscript_env['sconscript_file'] = sconscript_file

            build_root = sconscript_env['build_root']
            working_folder = 'working'

            sconscript_env = sconscript_env.Clone()
            sconscript_env['sconscript_env'] = sconscript_env

            sconscript_env['sconscript_build_dir'] = path_without_ext
            sconscript_env['sconscript_toolchain_build_dir'] = os.path.join(
                path_without_ext, toolchain.name())
            sconscript_env['sconscript_dir'] = os.path.join(
                sconscript_env['base_path'], sconstruct_offset_path)
            sconscript_env['abs_sconscript_dir'] = os.path.abspath(
                sconscript_env['sconscript_dir'])
            sconscript_env['tool_variant_dir'] = os.path.join(
                toolchain.name(), variant, target_arch, abi)
            sconscript_env['tool_variant_working_dir'] = os.path.join(
                sconscript_env['tool_variant_dir'], working_folder)

            build_base_path = os.path.join(path_without_ext,
                                           sconscript_env['tool_variant_dir'])

            def flatten_dir(directory, join_char="_"):
                return join_char.join(
                    os.path.normpath(directory).split(os.path.sep))

            sconscript_env['build_base_path'] = build_base_path
            sconscript_env['flat_build_base'] = flatten_dir(build_base_path)

            sconscript_env['tool_variant_build_dir'] = os.path.join(
                build_root, sconscript_env['tool_variant_dir'], working_folder)
            sconscript_env['build_dir'] = os.path.normpath(
                os.path.join(build_root, build_base_path, working_folder, ''))
            sconscript_env['abs_build_dir'] = os.path.abspath(
                sconscript_env['build_dir'])
            sconscript_env['build_tool_variant_dir'] = os.path.normpath(
                os.path.join(build_root, sconscript_env['tool_variant_dir'],
                             working_folder, ''))
            sconscript_env['offset_dir'] = sconstruct_offset_path
            sconscript_env['offset_tool_variant_dir'] = os.path.join(
                sconscript_env['offset_dir'],
                sconscript_env['tool_variant_dir'])
            sconscript_env['tool_variant_dir_offset'] = os.path.normpath(
                os.path.join(sconscript_env['tool_variant_dir'],
                             sconscript_env['offset_dir']))
            sconscript_env['flat_tool_variant_dir_offset'] = os.path.normpath(
                os.path.join(flatten_dir(sconscript_env['tool_variant_dir']),
                             sconscript_env['offset_dir']))
            sconscript_env[
                'final_dir'] = '..' + os.path.sep + 'final' + os.path.sep
            sconscript_env['active_toolchain'] = toolchain

            def abs_final_dir(abs_build_dir, final_dir):
                return os.path.isabs(
                    final_dir) and final_dir or os.path.normpath(
                        os.path.join(abs_build_dir, final_dir))

            sconscript_env['abs_final_dir'] = abs_final_dir(
                sconscript_env['abs_build_dir'], sconscript_env['final_dir'])

            sconscript_env.AppendUnique(INCPATH=[sconscript_env['offset_dir']])

            sconscript_exports = {
                'env': sconscript_env,
                'sconscript_env': sconscript_env,
                'build_root': build_root,
                'build_dir': sconscript_env['build_dir'],
                'abs_build_dir': sconscript_env['abs_build_dir'],
                'final_dir': sconscript_env['final_dir'],
                'abs_final_dir': sconscript_env['abs_final_dir'],
                'common_variant_final_dir': '../../../common/final/',
                'common_project_final_dir': build_root + '/common/final/',
                'project': name,
            }

            self._configure.configure(sconscript_exports['env'])

            cuppa.modules.registration.init_env_for_variant(
                "methods", sconscript_exports)

            if sconscript_env['dump']:
                logger.info("{} {}".format(
                    as_info_label("Dumping ENV for"),
                    as_info(sconscript_exports['build_dir'])))
                dump = sconscript_env.Dump()
                logger.info("\n" + dump + "\n")
            else:
                SCons.Script.SConscript(
                    [sconscript_file],
                    variant_dir=sconscript_exports['build_dir'],
                    duplicate=0,
                    exports=sconscript_exports)

        else:
            logger.error(
                "Skipping non-existent project [{}] using [{},{},{}]".format(
                    as_error(sconscript_file), as_error(toolchain.name()),
                    as_error(variant), as_error(target_arch)))
Example #35
 def returncode( self ):
     if self._exception != None:
         logger.error("pspawn terminated with exception [{}]".format( str(self._exception) ) )
         raise self._exception
     return self._returncode
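
returncode() re-raises an exception that was captured elsewhere, presumably while the spawned command was running, so the failure surfaces to whoever asks for the exit code. A small sketch of that capture-and-rethrow pattern with illustrative class and attribute names:

import logging

logger = logging.getLogger(__name__)

class CapturedResult(object):
    # Holds either a return code or the exception raised while producing it.
    def __init__(self):
        self._returncode = None
        self._exception = None

    def run(self, fn, *args, **kwargs):
        try:
            self._returncode = fn(*args, **kwargs)
        except Exception as e:
            # Remember the failure; it is re-raised when the result is read.
            self._exception = e

    def returncode(self):
        if self._exception is not None:
            logger.error("terminated with exception [%s]", self._exception)
            raise self._exception
        return self._returncode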
Example #36
    def call_project_sconscript_files( self, toolchain, variant, target_arch, sconscript_env, project ):

        sconscript_file = project

        if os.path.exists( sconscript_file ) and os.path.isfile( sconscript_file ):

            logger.debug( "project exists and added to build [{}] using [{},{},{}]".format(
                    as_notice( sconscript_file ),
                    as_notice( toolchain.name() ),
                    as_notice( variant ),
                    as_notice( target_arch )
            ) )

            path_without_ext = os.path.splitext( sconscript_file )[0]

            sconstruct_offset_path, sconscript_name = os.path.split( sconscript_file )

            name = os.path.splitext( sconscript_name )[0]
            if name.lower() == "sconscript":
                path_without_ext = sconstruct_offset_path
                name = path_without_ext

            sconscript_env['sconscript_file'] = sconscript_file

            build_root = sconscript_env['build_root']

            sconscript_env = sconscript_env.Clone()
            sconscript_env['sconscript_env'] = sconscript_env

            sconscript_env['sconscript_build_dir'] = path_without_ext
            sconscript_env['sconscript_toolchain_build_dir'] = os.path.join( path_without_ext, toolchain.name() )
            sconscript_env['sconscript_dir']   = os.path.join( sconscript_env['base_path'], sconstruct_offset_path )
            sconscript_env['build_dir']        = os.path.normpath( os.path.join( build_root, path_without_ext, toolchain.name(), variant, target_arch, 'working', '' ) )
            sconscript_env['abs_build_dir']    = os.path.abspath( sconscript_env['build_dir'] )
            sconscript_env['offset_dir']       = sconstruct_offset_path
            sconscript_env['final_dir']        = '..' + os.path.sep + 'final' + os.path.sep
            sconscript_env['active_toolchain'] = toolchain

            def abs_final_dir( abs_build_dir, final_dir ):
                return os.path.isabs( final_dir ) and final_dir or os.path.normpath( os.path.join( abs_build_dir, final_dir ) )

            sconscript_env['abs_final_dir'] = abs_final_dir( sconscript_env['abs_build_dir'], sconscript_env['final_dir'] )

            sconscript_env.AppendUnique( INCPATH = [
                    sconscript_env['offset_dir']
            ] )

            sconscript_exports = {
                'env'                     : sconscript_env,
                'sconscript_env'          : sconscript_env,
                'build_root'              : build_root,
                'build_dir'               : sconscript_env['build_dir'],
                'abs_build_dir'           : sconscript_env['abs_build_dir'],
                'final_dir'               : sconscript_env['final_dir'],
                'abs_final_dir'           : sconscript_env['abs_final_dir'],
                'common_variant_final_dir': '../../../common/final/',
                'common_project_final_dir': build_root + '/common/final/',
                'project'                 : name,
            }

            self._configure.configure( sconscript_exports['env'] )

            cuppa.modules.registration.init_env_for_variant( "methods", sconscript_exports )

            SCons.Script.SConscript(
                [ sconscript_file ],
                variant_dir = sconscript_exports['build_dir'],
                duplicate   = 0,
                exports     = sconscript_exports
            )

        else:
            logger.error( "Skipping non-existent project [{}] using [{},{},{}]".format(
                    as_error( sconscript_file ),
                    as_error( toolchain.name() ),
                    as_error( variant ),
                    as_error( target_arch )
            ) )
Example #37
File: location.py Project: iCodeIN/cuppa
    def get_local_directory_for_repository( self, location, sub_dir, full_url, local_directory ):
        vc_type = location.split('+', 1)[0]
        backend = pip_vcs.vcs.get_backend( vc_type )
        if not backend:
            logger.error( "URL VC of [{}] for [{}] NOT recognised so location cannot be retrieved".format(
                        as_error( vc_type ),
                        as_error( location )
            ) )
            raise LocationException( "URL VC of [{}] for [{}] NOT recognised so location cannot be retrieved".format( vc_type, location ) )

        if self._cuppa_env['dump'] or self._cuppa_env['clean']:
            return local_directory

        local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

        if not self._offline:
            try:
                vcs_backend = backend( self.expand_secret( location ) )
            except: # Pip version >= 19
                backend.url = self.expand_secret( location )
                vcs_backend = backend

            if os.path.exists( local_directory ):
                self.update_from_repository( location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend )
            else:
                self.obtain_from_repository( location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend )

            logger.debug( "(url path) Location = [{}]".format( as_info( location ) ) )
            logger.debug( "(url path) Local folder = [{}]".format( as_info( self._local_folder ) ) )
        else:
            branched_local_directory = None

            if self.location_match_current_branch():
                # If relative versioning is in play and we are offline check first to see
                # if the specified branch or tag is available and prefer that one
                if self._supports_relative_versioning and self._current_branch:
                    branched_local_directory = local_directory + "@" + self._current_branch
                    if os.path.exists( branched_local_directory ):
                        return branched_local_directory

                elif self._supports_relative_versioning and self._current_revision:
                    branched_local_directory = local_directory + "@" + self._current_revision
                    if os.path.exists( branched_local_directory ):
                        return branched_local_directory

            elif self._supports_relative_versioning and self._default_branch:
                branched_local_directory = local_directory + "@" + self._default_branch
                if os.path.exists( branched_local_directory ):
                    return branched_local_directory

            # If the preferred branch is not available then fallback to the
            # default of no branch being specified
            if os.path.exists( local_directory ):
                return local_directory
            else:
                if self.location_match_current_branch():
                    logger.error(
                        "Running in {offline} mode and neither [{local_dir}] or a branched dir"
                        " [{branched_dir}] exists so location cannot be retrieved".format(
                            offline      = as_info_label("OFFLINE"),
                            local_dir    = as_error(local_directory),
                            branched_dir = as_error(str(branched_local_directory))
                    ) )
                    raise LocationException(
                        "Running in {offline} mode and neither [{local_dir}] or a branched dir"
                        " [{branched_dir}] exists so location cannot be retrieved".format(
                            offline      = "OFFLINE",
                            local_dir    = local_directory,
                            branched_dir = str(branched_local_directory)
                    ) )
                else:
                    logger.error(
                        "Running in {offline} mode and [{local_dir}] does not exist"
                        " so location cannot be retrieved".format(
                            offline      = as_info_label("OFFLINE"),
                            local_dir    = as_error(local_directory)
                    ) )
                    raise LocationException(
                        "Running in {offline} mode and [{local_dir}] does not exist"
                        " so location cannot be retrieved".format(
                            offline      = "OFFLINE",
                            local_dir    = local_directory
                    ) )

        return local_directory
Example #38
    def Popen2( cls, stdout_processor, stderr_processor, args_list, **kwargs ):

        kwargs['stdout'] = subprocess.PIPE
        kwargs['stderr'] = subprocess.PIPE

        timing_enabled = logger.isEnabledFor( logging.DEBUG )

        suppress_output = False
        if 'suppress_output' in kwargs:
            suppress_output = kwargs['suppress_output']
            del kwargs['suppress_output']

        use_shell = False
        if 'scons_env' in kwargs:
            use_shell = kwargs['scons_env'].get_option( 'use-shell' )
            del kwargs['scons_env']

        try:
            process = None
            stderr_thread = None

            timer = timing_enabled and cuppa.timer.Timer() or None
            if timer:
                logger.debug( "Command [{}] - Running...".format( as_notice(str(timer.timer_id())) ) )

            close_fds = platform.system() == "Windows" and False or True

            if not suppress_output:
                sys.stdout.write( " ".join(args_list) + "\n" )

            process = subprocess.Popen(
                use_shell and " ".join(args_list) or args_list,
                **dict( kwargs, close_fds=close_fds, shell=use_shell, universal_newlines=True )
            )

            stderr_consumer = LineConsumer( process.stderr.readline, stderr_processor )
            stdout_consumer = LineConsumer( process.stdout.readline, stdout_processor )

            stderr_thread = threading.Thread( target=stderr_consumer )
            stderr_thread.start()
            stdout_consumer()
            stderr_thread.join()

            process.wait()

            if timer:
                timer.stop()
                logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )

            return process.returncode

        except Exception as e:
            if timer:
                timer.stop()
                logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )
            logger.error( "IncrementalSubProcess.Popen2() failed with error [{}]".format( str(e) ) )
            if process:
                logger.info( "Killing existing POpen object" )
                process.kill()
            if stderr_thread:
                logger.info( "Joining any running threads" )
                stderr_thread.join()
            raise e
Example #39
 def returncode( self ):
     if self._exception != None:
         logger.error("pspawn terminated with exception [{}]".format( str(self._exception) ) )
         raise self._exception
     return self._returncode
Example #40
    def call_project_sconscript_files( self, toolchain, variant, target_arch, abi, sconscript_env, project ):

        sconscript_file = project

        if os.path.exists( sconscript_file ) and os.path.isfile( sconscript_file ):

            logger.debug( "project exists and added to build [{}] using [{},{},{}]".format(
                    as_notice( sconscript_file ),
                    as_notice( toolchain.name() ),
                    as_notice( variant ),
                    as_notice( target_arch )
            ) )

            path_without_ext = os.path.splitext( sconscript_file )[0]

            sconstruct_offset_path, sconscript_name = os.path.split( sconscript_file )

            name = os.path.splitext( sconscript_name )[0]
            sconscript_env['sconscript_name_id'] = name
            if name.lower() == "sconscript":
                sconscript_env['sconscript_name_id'] = ""
                path_without_ext = sconstruct_offset_path
                name = path_without_ext

            sconscript_env['sconscript_file'] = sconscript_file

            build_root = sconscript_env['build_root']
            working_folder = 'working'

            sconscript_env = sconscript_env.Clone()
            sconscript_env['sconscript_env'] = sconscript_env

            sconscript_env['sconscript_build_dir'] = path_without_ext
            sconscript_env['sconscript_toolchain_build_dir'] = os.path.join( path_without_ext, toolchain.name() )
            sconscript_env['sconscript_dir'] = os.path.join( sconscript_env['base_path'], sconstruct_offset_path )
            sconscript_env['abs_sconscript_dir'] = os.path.abspath( sconscript_env['sconscript_dir'] )
            sconscript_env['tool_variant_dir'] = os.path.join( toolchain.name(), variant, target_arch, abi )
            sconscript_env['tool_variant_working_dir'] = os.path.join( sconscript_env['tool_variant_dir'], working_folder )

            build_base_path = os.path.join( path_without_ext, sconscript_env['tool_variant_dir'] )

            def flatten_dir( directory, join_char="_" ):
                return join_char.join( os.path.normpath( directory ).split( os.path.sep ) )

            sconscript_env['build_base_path']  = build_base_path
            sconscript_env['flat_build_base']  = flatten_dir( build_base_path )

            sconscript_env['tool_variant_build_dir']  = os.path.join( build_root, sconscript_env['tool_variant_dir'], working_folder )
            sconscript_env['build_dir']               = os.path.normpath( os.path.join( build_root, build_base_path, working_folder, '' ) )
            sconscript_env['abs_build_dir']           = os.path.abspath( sconscript_env['build_dir'] )
            sconscript_env['build_tool_variant_dir']  = os.path.normpath( os.path.join( build_root, sconscript_env['tool_variant_dir'], working_folder, '' ) )
            sconscript_env['offset_dir']              = sconstruct_offset_path
            sconscript_env['offset_tool_variant_dir'] = os.path.join( sconscript_env['offset_dir'], sconscript_env['tool_variant_dir'] )
            sconscript_env['tool_variant_dir_offset'] = os.path.normpath( os.path.join( sconscript_env['tool_variant_dir'], sconscript_env['offset_dir'] ) )
            sconscript_env['flat_tool_variant_dir_offset'] = os.path.normpath( os.path.join( flatten_dir( sconscript_env['tool_variant_dir'] ), sconscript_env['offset_dir'] ) )
            sconscript_env['final_dir']               = '..' + os.path.sep + 'final' + os.path.sep
            sconscript_env['active_toolchain']        = toolchain

            def abs_final_dir( abs_build_dir, final_dir ):
                return os.path.isabs( final_dir ) and final_dir or os.path.normpath( os.path.join( abs_build_dir, final_dir ) )

            sconscript_env['abs_final_dir']  = abs_final_dir( sconscript_env['abs_build_dir'], sconscript_env['final_dir'] )

            sconscript_env.AppendUnique( INCPATH = [
                    sconscript_env['offset_dir']
            ] )

            sconscript_exports = {
                'env'                     : sconscript_env,
                'sconscript_env'          : sconscript_env,
                'build_root'              : build_root,
                'build_dir'               : sconscript_env['build_dir'],
                'abs_build_dir'           : sconscript_env['abs_build_dir'],
                'final_dir'               : sconscript_env['final_dir'],
                'abs_final_dir'           : sconscript_env['abs_final_dir'],
                'common_variant_final_dir': '../../../common/final/',
                'common_project_final_dir': build_root + '/common/final/',
                'project'                 : name,
            }

            self._configure.configure( sconscript_exports['env'] )

            cuppa.modules.registration.init_env_for_variant( "methods", sconscript_exports )

            if sconscript_env['dump']:
                logger.info( "{} {}".format( as_info_label( "Dumping ENV for"), as_info( sconscript_exports['build_dir'] ) ) )
                dump = sconscript_env.Dump()
                logger.info( "\n" + dump + "\n" )
            else:
                SCons.Script.SConscript(
                    [ sconscript_file ],
                    variant_dir = sconscript_exports['build_dir'],
                    duplicate   = 0,
                    exports     = sconscript_exports
                )

        else:
            logger.error( "Skipping non-existent project [{}] using [{},{},{}]".format(
                    as_error( sconscript_file ),
                    as_error( toolchain.name() ),
                    as_error( variant ),
                    as_error( target_arch )
            ) )
Example #41
    def get_local_directory(self, cuppa_env, location, sub_dir, branch,
                            full_url):

        offline = cuppa_env['offline']
        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs(base):
            base = os.path.join(cuppa_env['working_dir'], base)

        if location.startswith('file:'):
            location = pip_download.url_to_path(location)

        if not pip_is_url(location):

            if pip_is_archive_file(location):

                self._local_folder = self.folder_name_from_path(
                    location, cuppa_env)
                local_directory = os.path.join(base, self._local_folder)

                local_dir_with_sub_dir = os.path.join(
                    local_directory, sub_dir and sub_dir or "")

                if os.path.exists(local_dir_with_sub_dir):
                    try:
                        os.rmdir(local_dir_with_sub_dir)
                    except:
                        return local_directory

                self.extract(location, local_dir_with_sub_dir)
                logger.debug("(local archive) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(local archive) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            else:
                local_directory = branch and os.path.join(location,
                                                          branch) or location
                self._local_folder = self.folder_name_from_path(
                    location, cuppa_env)

                logger.debug("(local file) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(local file) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            return local_directory
        else:

            self._local_folder = self.folder_name_from_path(
                full_url, cuppa_env)
            local_directory = os.path.join(base, self._local_folder)

            if full_url.scheme.startswith(
                    'http') and self.url_is_download_archive_url(
                        full_url.path):
                logger.debug("[{}] is an archive download".format(
                    as_info(location)))

                local_dir_with_sub_dir = os.path.join(
                    local_directory, sub_dir and sub_dir or "")

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists(local_dir_with_sub_dir):
                    try:
                        # If not empty this will fail
                        os.rmdir(local_dir_with_sub_dir)
                    except:
                        # Not empty so we'll return this as the local_directory

                        logger.debug(
                            "(already present) Location = [{}]".format(
                                as_info(location)))
                        logger.debug(
                            "(already present) Local folder = [{}]".format(
                                as_info(str(self._local_folder))))

                        return local_directory

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                # If not we then check to see if we cached the download
                cached_archive = self.get_cached_archive(
                    cuppa_env['cache_root'], self._local_folder)
                if cached_archive:
                    logger.debug("Cached archive [{}] found for [{}]".format(
                        as_info(cached_archive), as_info(location)))
                    self.extract(cached_archive, local_dir_with_sub_dir)
                else:
                    logger.info("Downloading [{}]...".format(
                        as_info(location)))
                    try:
                        report_hook = None
                        if logger.isEnabledFor(logging.INFO):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urlretrieve(location,
                                                        reporthook=report_hook)
                        name, extension = os.path.splitext(filename)
                        logger.info(
                            "[{}] successfully downloaded to [{}]".format(
                                as_info(location), as_info(filename)))
                        self.extract(filename, local_dir_with_sub_dir)
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join(
                                cuppa_env['cache_root'], self._local_folder)
                            logger.debug(
                                "Caching downloaded file as [{}]".format(
                                    as_info(cached_archive)))
                            shutil.copyfile(filename, cached_archive)
                    except ContentTooShortError as error:
                        logger.error(
                            "Download of [{}] failed with error [{}]".format(
                                as_error(location), as_error(str(error))))
                        raise LocationException(error)

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip_vcs.vcs.get_backend(vc_type)
                if backend:
                    try:
                        vcs_backend = backend(self.expand_secret(location))
                    except:  # Pip version >= 19
                        backend.url = self.expand_secret(location)
                        vcs_backend = backend
                    local_dir_with_sub_dir = os.path.join(
                        local_directory, sub_dir and sub_dir or "")

                    if cuppa_env['dump'] or cuppa_env['clean']:
                        return local_directory

                    if os.path.exists(local_directory):
                        url, repository, branch, remote, revision = self.get_info(
                            location, local_dir_with_sub_dir, full_url,
                            vc_type)
                        rev_options = self.get_rev_options(vc_type,
                                                           vcs_backend,
                                                           local_remote=remote)
                        version = self.ver_rev_summary(branch, revision,
                                                       self._full_url.path)[0]
                        if not offline:
                            logger.info(
                                "Updating [{}] in [{}]{} at [{}]".format(
                                    as_info(location),
                                    as_notice(local_dir_with_sub_dir),
                                    (rev_options and " on {}".format(
                                        as_notice(str(rev_options))) or ""),
                                    as_info(version)))
                            try:
                                update(vcs_backend, local_dir_with_sub_dir,
                                       rev_options)
                                logger.debug(
                                    "Successfully updated [{}]".format(
                                        as_info(location)))
                            except pip_exceptions.PipError as error:
                                logger.warn(
                                    "Could not update [{}] in [{}]{} due to error [{}]"
                                    .format(as_warning(location),
                                            as_warning(local_dir_with_sub_dir),
                                            (rev_options and " at {}".format(
                                                as_warning(str(rev_options)))
                                             or ""), as_warning(str(error))))
                        else:
                            logger.debug(
                                "Skipping update for [{}] as running in offline mode"
                                .format(as_info(location)))
                    else:
                        rev_options = self.get_rev_options(
                            vc_type, vcs_backend)
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        max_attempts = 2
                        attempt = 1
                        while attempt <= max_attempts:
                            logger.info("{} [{}] into [{}]{}".format(
                                action, as_info(location),
                                as_info(local_dir_with_sub_dir), attempt > 1
                                and "(attempt {})".format(str(attempt)) or ""))
                            try:
                                obtain(vcs_backend, local_dir_with_sub_dir,
                                       vcs_backend.url)
                                logger.debug(
                                    "Successfully retrieved [{}]".format(
                                        as_info(location)))
                                break
                            except pip_exceptions.PipError as error:
                                attempt = attempt + 1
                                log_as = logger.warn
                                if attempt > max_attempts:
                                    log_as = logger.error

                                log_as(
                                    "Could not retrieve [{}] into [{}]{} due to error [{}]"
                                    .format(as_info(location),
                                            as_notice(local_dir_with_sub_dir),
                                            (rev_options and " to {}".format(
                                                as_notice(str(rev_options)))
                                             or ""), as_error(str(error))))
                                if attempt > max_attempts:
                                    raise LocationException(str(error))

                logger.debug("(url path) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(url path) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            return local_directory
Example #42
    def create( cls, env ):

        boost_id = boost_location_id( env )

        if boost_id not in cls._cached_boost_locations:
            logger.debug( "Adding boost [{}] to env".format( as_notice( str(boost_id) ) ) )
            cls._cached_boost_locations[ boost_id ] = get_boost_location( env, boost_id[0], boost_id[1], boost_id[2], boost_id[3] )

        location = cls._cached_boost_locations[ boost_id ]

        boost = None
        try:
            boost = cls( env, env[ 'platform' ], location )
        except BoostException as e:
            logger.error( "Could not create boost dependency - {}".format(e) )
            return None

        if not boost:
            logger.error( "Could not create boost dependency" )
            return None

        build_always   = env.get_option( 'boost-build-always' )
        verbose_build  = env.get_option( 'boost-verbose-build' )
        verbose_config = env.get_option( 'boost-verbose-config' )

        env.AddMethod(
                BoostStaticLibraryMethod(
                        add_dependents=False,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config),
                "BoostStaticLibrary"
        )
        env.AddMethod(
                BoostSharedLibraryMethod(
                        add_dependents=False,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config),
                "BoostSharedLibrary"
        )
        env.AddMethod(
                BoostStaticLibraryMethod(
                        add_dependents=False,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config),
                "BoostStaticLib"
        )
        env.AddMethod(
                BoostSharedLibraryMethod(
                        add_dependents=False,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config),
                "BoostSharedLib"
        )
        env.AddMethod(
                BoostStaticLibraryMethod(
                        add_dependents=True,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config),
                "BoostStaticLibs"
        )
        env.AddMethod(
                BoostSharedLibraryMethod(
                        add_dependents=True,
                        build_always=build_always,
                        verbose_build=verbose_build,
                        verbose_config=verbose_config),
                "BoostSharedLibs"
        )
        return boost
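
Once create() has registered these methods, a sconscript can request Boost libraries directly from the environment. A minimal sketch of that usage follows, assuming the common cuppa pattern of appending the result to a STATICLIBS-style link list; the library names are illustrative.

# Hypothetical usage once the boost dependency has been created for this env.
# 'env' is the construction environment passed to the sconscript; the library
# names and the STATICLIBS variable are assumptions for illustration.
env.AppendUnique( STATICLIBS = env.BoostStaticLibs( [ 'system', 'filesystem' ] ) )
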
Example #43
    def __call__( self, target, source, env ):

        executable   = str( source[0].abspath )
        working_dir  = self._working_dir and self._working_dir or os.path.split( executable )[0]
        program_path = source[0].path
        notifier     = Notify(env, env['show_test_output'])

        if cuppa.build_platform.name() == "Windows":
            executable = '"' + executable + '"'

        boost_version = None
        preprocess = self.default_preprocess
        argument_prefix = ""

        if 'boost' in env['dependencies']:
            boost_version = env['dependencies']['boost']( env ).numeric_version()
            if env['dependencies']['boost']( env ).patched_test():
                argument_prefix="boost.test."

        test_command = executable + " --{0}log_format=hrf --{0}log_level=test_suite --{0}report_level=no".format( argument_prefix )

        if boost_version:
            if boost_version >= 1.67:
                preprocess = cuppa.utility.preprocess.AnsiEscape.strip
                test_command = executable + " --{0}log_format=HRF --{0}log_level=all --{0}report_level=no --{0}color_output=no".format( argument_prefix )
            elif boost_version >= 1.60:
                test_command = executable + " --{0}log_format=HRF --{0}log_level=test_suite --{0}report_level=no".format( argument_prefix )

        try:
            return_code, tests = self._run_test(
                    program_path,
                    test_command,
                    working_dir,
                    env['branch_root'],
                    notifier,
                    preprocess,
                    env
            )

            cuppa.test_report.cuppa_json.write_report( report_file_name_from( program_path ), tests )

            if return_code < 0:
                self._write_file_to_stderr( stderr_file_name_from( program_path ) )
                logger.error( "Test was terminated by signal: {}".format( as_notice(str(-return_code)) ) )
            elif return_code > 0:
                self._write_file_to_stderr( stderr_file_name_from( program_path ) )
                logger.error( "Test returned with error code: {}".format( as_notice(str(return_code)) ) )
            elif notifier.master_suite['status'] != 'passed':
                logger.error( "Not all test suites passed" )
                raise BuildError( node=source[0], errstr="Not all test suites passed" )

            if return_code:
                self._remove_success_file( success_file_name_from( program_path ) )
                if return_code < 0:
                    raise BuildError( node=source[0], errstr="Test was terminated by signal: {}".format( str(-return_code) ) )
                else:
                    raise BuildError( node=source[0], errstr="Test returned with error code: {}".format( str(return_code) ) )
            else:
                self._write_success_file( success_file_name_from( program_path ) )

            return None

        except OSError as e:
            logger.error( "Execution of [{}] failed with error: {}".format( as_notice(test_command), as_notice(str(e)) ) )
            raise BuildError( e )
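
For reference, with a patched Boost at 1.67 or later the runner above ends up executing a command of roughly the following shape; the executable path is a placeholder.

# Illustrative command produced for a patched Boost >= 1.67 test binary.
test_command = (
    '"/path/to/test_runner"'
    ' --boost.test.log_format=HRF'
    ' --boost.test.log_level=all'
    ' --boost.test.report_level=no'
    ' --boost.test.color_output=no'
)
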
Example #44
    def Popen2( cls, stdout_processor, stderr_processor, args_list, **kwargs ):

        kwargs['stdout'] = subprocess.PIPE
        kwargs['stderr'] = subprocess.PIPE

        timing_enabled = logger.isEnabledFor( logging.DEBUG )

        suppress_output = False
        if 'suppress_output' in kwargs:
            suppress_output = kwargs['suppress_output']
            del kwargs['suppress_output']

        use_shell = False
        if 'scons_env' in kwargs:
            use_shell = kwargs['scons_env'].get_option( 'use-shell' )
            del kwargs['scons_env']

        orig_stdout = sys.stdout
        orig_stderr = sys.stderr

        # Initialise these before the try block so the exception handler can
        # reference them safely even if an error occurs early on
        process = None
        stderr_thread = None
        timer = None

        try:
            # TODO: Review this as it might be needed for Windows otherwise replace
            # the wrapped values with orig_stdout and orig_stderr respectively
            sys.stdout = AutoFlushFile( colorama.initialise.wrapped_stdout )
            sys.stderr = AutoFlushFile( colorama.initialise.wrapped_stderr )

            timer = timing_enabled and cuppa.timer.Timer() or None
            if timer:
                logger.debug( "Command [{}] - Running...".format( as_notice(str(timer.timer_id())) ) )

            # Don't close file descriptors on Windows; do so everywhere else
            close_fds = platform.system() != "Windows"

            if not suppress_output:
                sys.stdout.write( " ".join(args_list) + "\n" )

            process = subprocess.Popen(
                use_shell and " ".join(args_list) or args_list,
                **dict( kwargs, close_fds=close_fds, shell=use_shell )
            )

            stderr_consumer = LineConsumer( process.stderr.readline, stderr_processor )
            stdout_consumer = LineConsumer( process.stdout.readline, stdout_processor )

            stderr_thread = threading.Thread( target=stderr_consumer )
            stderr_thread.start()
            stdout_consumer()
            stderr_thread.join()

            process.wait()

            if timer:
                timer.stop()
                logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )

            return process.returncode

        except Exception as e:
            if timer:
                timer.stop()
                logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )
            logger.error( "IncrementalSubProcess.Popen2() failed with error [{}]".format( str(e) ) )
            if process:
                logger.info( "Killing existing POpen object" )
                process.kill()
            if stderr_thread:
                logger.info( "Joining any running threads" )
                stderr_thread.join()
            raise e

        finally:
            sys.stdout = orig_stdout
            sys.stderr = orig_stderr
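
A minimal sketch of calling Popen2 follows, assuming each processor is invoked with one line of captured output at a time; the command and processor names are illustrative.

import sys

# Hypothetical line processors; each is assumed to receive one line of output
# (which may be bytes or text depending on the Python version and Popen kwargs).
def show_stdout( line ):
    sys.stdout.write( line )

def show_stderr( line ):
    sys.stderr.write( line )

# Run an external command and stream its output through the processors.
return_code = IncrementalSubProcess.Popen2( show_stdout, show_stderr, [ 'cmake', '--version' ] )
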
Example #45
    def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):

        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs( base ):
            base = os.path.join( cuppa_env['working_dir'], base )

        if location.startswith( 'file:' ):
            location = pip.download.url_to_path( location )

        if not pip.download.is_url( location ):

            if pip.download.is_archive_file( location ):

                local_folder = self.folder_name_from_path( location )
                local_directory = os.path.join( base, local_folder )

                if os.path.exists( local_directory ):
                    try:
                        # If the directory is not empty this will fail
                        os.rmdir( local_directory )
                    except:
                        # Not empty, so reuse the existing extraction
                        return local_directory, False

                self.extract( location, local_directory )
            else:
                local_directory = branch and os.path.join( location, branch ) or location
                return local_directory, False
        else:

            local_folder = self.folder_name_from_path( full_url )
            local_directory = os.path.join( base, local_folder )

            if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
                logger.debug( "[{}] is an archive download".format( as_info( location ) ) )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        # If not empty this will fail
                        os.rmdir( local_dir_with_sub_dir )
                    except:
                        # Not empty so we'll return this as the local_directory
                        return local_directory, True

                # If not we then check to see if we cached the download
                cached_archive = self.get_cached_archive( cuppa_env['cache_root'], local_folder )
                if cached_archive:
                    logger.debug( "Cached archive [{}] found for [{}]".format(
                            as_info( cached_archive ),
                            as_info( location )
                    ) )
                    self.extract( cached_archive, local_dir_with_sub_dir )
                else:
                    logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                    try:
                        report_hook = None
                        if logger.isEnabledFor( logging.INFO ):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                        name, extension = os.path.splitext( filename )
                        logger.info( "[{}] successfully downloaded to [{}]".format(
                                as_info( location ),
                                as_info( filename )
                        ) )
                        self.extract( filename, local_dir_with_sub_dir )
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join( cuppa_env['cache_root'], local_folder )
                            logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                            shutil.copyfile( filename, cached_archive )
                    except urllib.ContentTooShortError as error:
                        logger.error( "Download of [{}] failed with error [{}]".format(
                                as_error( location ),
                                as_error( str(error) )
                        ) )
                        raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip.vcs.vcs.get_backend( vc_type )
                if backend:
                    vcs_backend = backend( location )
                    rev_options = self.get_rev_options( vc_type, vcs_backend )

                    local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

                    if os.path.exists( local_directory ):

                        url, repository, branch, revision = self.get_info( location, local_dir_with_sub_dir, full_url )
                        version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                        logger.debug( "Updating [{}] in [{}]{} at [{}]".format(
                                as_info( location ),
                                as_notice( local_dir_with_sub_dir ),
                                ( rev_options and  " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                                as_info( version )
                        ) )
                        try:
                            vcs_backend.update( local_dir_with_sub_dir, rev_options )
                            logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                        except pip.exceptions.InstallationError as error:
                            logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                    as_warning( location ),
                                    as_warning( local_dir_with_sub_dir ),
                                    ( rev_options and  " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                    as_warning( str(error) )
                            ) )
                    else:
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        logger.info( "{} [{}] into [{}]".format(
                                action, as_info( location ),
                                as_info( local_dir_with_sub_dir )
                        ) )
                        try:
                            vcs_backend.obtain( local_dir_with_sub_dir )
                            logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                        except pip.exceptions.InstallationError as error:
                            logger.error( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                    as_error( location ),
                                    as_error( local_dir_with_sub_dir ),
                                    ( rev_options and  " to {}".format( as_error(  str(rev_options) ) ) or ""),
                                    as_error( str( error ) )
                            ) )
                            raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

            return local_directory, True
Example #46
    def __call__(self, target, source, env):

        executable = str(source[0].abspath)
        working_dir = self._working_dir and self._working_dir or os.path.split(
            executable)[0]
        program_path = source[0].path
        notifier = Notify(env, env['show_test_output'])

        if cuppa.build_platform.name() == "Windows":
            executable = '"' + executable + '"'

        boost_version = None
        preprocess = self.default_preprocess
        argument_prefix = ""

        if 'boost' in env['dependencies']:
            boost_version = env['dependencies']['boost'](env).numeric_version()
            if env['dependencies']['boost'](env).patched_test():
                argument_prefix = "boost.test."

        test_command = executable + " --{0}log_format=hrf --{0}log_level=test_suite --{0}report_level=no".format(
            argument_prefix)

        if boost_version:
            if boost_version >= 1.67:
                preprocess = cuppa.utility.preprocess.AnsiEscape.strip
                test_command = executable + " --{0}log_format=HRF --{0}log_level=all --{0}report_level=no --{0}color_output=no".format(
                    argument_prefix)
            elif boost_version >= 1.60:
                test_command = executable + " --{0}log_format=HRF --{0}log_level=test_suite --{0}report_level=no".format(
                    argument_prefix)

        try:
            return_code, tests = self._run_test(program_path, test_command,
                                                working_dir,
                                                env['branch_root'], notifier,
                                                preprocess, env)

            cuppa.test_report.cuppa_json.write_report(
                report_file_name_from(program_path), tests)

            if return_code < 0:
                self._write_file_to_stderr(stderr_file_name_from(program_path))
                logger.error("Test was terminated by signal: {}".format(
                    as_notice(str(-return_code))))
            elif return_code > 0:
                self._write_file_to_stderr(stderr_file_name_from(program_path))
                logger.error("Test returned with error code: {}".format(
                    as_notice(str(return_code))))
            elif notifier.master_suite['status'] != 'passed':
                logger.error("Not all test suites passed")
                raise BuildError(node=source[0],
                                 errstr="Not all test suites passed")

            if return_code:
                self._remove_success_file(success_file_name_from(program_path))
                if return_code < 0:
                    raise BuildError(
                        node=source[0],
                        errstr="Test was terminated by signal: {}".format(
                            str(-return_code)))
                else:
                    raise BuildError(
                        node=source[0],
                        errstr="Test returned with error code: {}".format(
                            str(return_code)))
            else:
                self._write_success_file(success_file_name_from(program_path))

            return None

        except OSError as e:
            logger.error("Execution of [{}] failed with error: {}".format(
                as_notice(test_command), as_notice(str(e))))
            raise BuildError(e)
Example #47
    def __call__(self, target, source, env):

        executable = str(source[0].abspath)
        working_dir, test = os.path.split(executable)
        if self._working_dir:
            working_dir = self._working_dir
        program_path = source[0].path
        suite = env['build_dir']

        if cuppa.build_platform.name() == "Windows":
            executable = '"' + executable + '"'

        test_command = executable
        if self._command:
            test_command = self._command
            working_dir = self._working_dir and self._working_dir or self._final_dir
            test = os.path.relpath(executable, working_dir)

        test_suite = TestSuite.create(suite, env)

        test_case = test_suite.enter_test(test, expected=self._expected)

        show_test_output = env['show_test_output']

        try:
            return_code = self._run_test(test_case, show_test_output,
                                         program_path, test_command,
                                         working_dir, env)

            if return_code == self._expected_exit_code:
                test_suite.exit_test(test_case, 'passed')
            elif return_code < 0:
                self.__write_file_to_stderr(
                    stderr_file_name_from(program_path))
                logger.error("Test was terminated by signal: {}".format(
                    as_error(str(return_code))))
                test_suite.exit_test(test_case, 'aborted')
            elif return_code > 0:
                self.__write_file_to_stderr(
                    stderr_file_name_from(program_path))
                logger.error("Test returned with error code: {}".format(
                    as_error(str(return_code))))
                test_suite.exit_test(test_case, 'failed')
            else:
                test_suite.exit_test(test_case, 'passed')

            cuppa.test_report.cuppa_json.write_report(
                report_file_name_from(program_path), test_suite.tests())

            if return_code == self._expected_exit_code:
                self._write_success_file(success_file_name_from(program_path))
            elif return_code:
                self._remove_success_file(success_file_name_from(program_path))
                if return_code < 0:
                    raise BuildError(
                        node=source[0],
                        errstr="Test was terminated by signal: {}".format(
                            str(-return_code)))
                else:
                    raise BuildError(
                        node=source[0],
                        errstr="Test returned with error code: {}".format(
                            str(return_code)))
            else:
                self._write_success_file(success_file_name_from(program_path))

            return None

        except OSError as e:
            logger.error("Execution of [{}] failed with error: {}".format(
                as_notice(test_command), as_notice(str(e))))
            raise BuildError(e)
Example #48
    def __call__( self, target, source, env ):

        executable = str( source[0].abspath )
        working_dir, test = os.path.split( executable )
        if self._working_dir:
            working_dir = self._working_dir
        program_path = source[0].path
        suite = env['build_dir']

        if cuppa.build_platform.name() == "Windows":
            executable = '"' + executable + '"'

        test_command = executable
        if self._command:
            test_command = self._command
            working_dir = self._working_dir and self._working_dir or self._final_dir
            test = os.path.relpath( executable, working_dir )

        test_suite = TestSuite.create( suite, env )

        test_case = test_suite.enter_test( test, expected=self._expected )

        show_test_output = env['show_test_output']

        try:
            return_code = self._run_test(
                    test_case,
                    show_test_output,
                    program_path,
                    test_command,
                    working_dir,
                    env
            )

            if return_code == self._expected_exit_code:
                test_suite.exit_test( test_case, 'passed' )
            elif return_code < 0:
                self.__write_file_to_stderr( stderr_file_name_from( program_path ) )
                logger.error( "Test was terminated by signal: {}".format( as_error(str(return_code) ) ) )
                test_suite.exit_test( test_case, 'aborted' )
            elif return_code > 0:
                self.__write_file_to_stderr( stderr_file_name_from( program_path ) )
                logger.error( "Test returned with error code: {}".format( as_error(str(return_code) ) ) )
                test_suite.exit_test( test_case, 'failed' )
            else:
                test_suite.exit_test( test_case, 'passed' )

            cuppa.test_report.cuppa_json.write_report( report_file_name_from( program_path ), test_suite.tests() )

            if return_code == self._expected_exit_code:
                self._write_success_file( success_file_name_from( program_path ) )
            elif return_code:
                self._remove_success_file( success_file_name_from( program_path ) )
                if return_code < 0:
                    raise BuildError( node=source[0], errstr="Test was terminated by signal: {}".format( str(-return_code) ) )
                else:
                    raise BuildError( node=source[0], errstr="Test returned with error code: {}".format( str(return_code) ) )
            else:
                self._write_success_file( success_file_name_from( program_path ) )

            return None

        except OSError as e:
            logger.error( "Execution of [{}] failed with error: {}".format( as_notice(test_command), as_notice(str(e)) ) )
            raise BuildError( e )
Example #49
File: location.py  Project: ja11sop/cuppa
    def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):

        offline = cuppa_env['offline']
        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs( base ):
            base = os.path.join( cuppa_env['working_dir'], base )

        if location.startswith( 'file:' ):
            location = pip_download.url_to_path( location )

        if not pip_download.is_url( location ):

            if pip_download.is_archive_file( location ):

                self._local_folder = self.folder_name_from_path( location, cuppa_env )
                local_directory = os.path.join( base, self._local_folder )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        # If the directory is not empty this will fail
                        os.rmdir( local_dir_with_sub_dir )
                    except:
                        # Not empty, so reuse the existing extraction
                        return local_directory

                self.extract( location, local_dir_with_sub_dir )
                logger.debug( "(local archive) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(local archive) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            else:
                local_directory = branch and os.path.join( location, branch ) or location
                self._local_folder = self.folder_name_from_path( location, cuppa_env )

                logger.debug( "(local file) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(local file) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            return local_directory
        else:

            self._local_folder = self.folder_name_from_path( full_url, cuppa_env )
            local_directory = os.path.join( base, self._local_folder )

            if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
                logger.debug( "[{}] is an archive download".format( as_info( location ) ) )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        # If not empty this will fail
                        os.rmdir( local_dir_with_sub_dir )
                    except:
                        # Not empty so we'll return this as the local_directory

                        logger.debug( "(already present) Location = [{}]".format( as_info( location ) ) )
                        logger.debug( "(already present) Local folder = [{}]".format( as_info( str(self._local_folder) ) ) )

                        return local_directory

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                # If not we then check to see if we cached the download
                cached_archive = self.get_cached_archive( cuppa_env['cache_root'], self._local_folder )
                if cached_archive:
                    logger.debug( "Cached archive [{}] found for [{}]".format(
                            as_info( cached_archive ),
                            as_info( location )
                    ) )
                    self.extract( cached_archive, local_dir_with_sub_dir )
                else:
                    logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                    try:
                        report_hook = None
                        if logger.isEnabledFor( logging.INFO ):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                        name, extension = os.path.splitext( filename )
                        logger.info( "[{}] successfully downloaded to [{}]".format(
                                as_info( location ),
                                as_info( filename )
                        ) )
                        self.extract( filename, local_dir_with_sub_dir )
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join( cuppa_env['cache_root'], self._local_folder )
                            logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                            shutil.copyfile( filename, cached_archive )
                    except urllib.ContentTooShortError as error:
                        logger.error( "Download of [{}] failed with error [{}]".format(
                                as_error( location ),
                                as_error( str(error) )
                        ) )
                        raise LocationException( error )

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip_vcs.vcs.get_backend( vc_type )
                if backend:
                    vcs_backend = backend( self.expand_secret( location ) )
                    local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                    if cuppa_env['dump'] or cuppa_env['clean']:
                        return local_directory

                    if os.path.exists( local_directory ):
                        url, repository, branch, remote, revision = self.get_info( location, local_dir_with_sub_dir, full_url, vc_type )
                        rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
                        version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                        if not offline:
                            logger.info( "Updating [{}] in [{}]{} at [{}]".format(
                                    as_info( location ),
                                    as_notice( local_dir_with_sub_dir ),
                                    ( rev_options and  " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                                    as_info( version )
                            ) )
                            try:
                                update( vcs_backend, local_dir_with_sub_dir, rev_options )
                                logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                            except pip_exceptions.PipError as error:
                                logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                        as_warning( location ),
                                        as_warning( local_dir_with_sub_dir ),
                                        ( rev_options and  " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                        as_warning( str(error) )
                                ) )
                        else:
                            logger.debug( "Skipping update for [{}] as running in offline mode".format( as_info( location ) ) )
                    else:
                        rev_options = self.get_rev_options( vc_type, vcs_backend )
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        max_attempts = 2
                        attempt = 1
                        while attempt <= max_attempts:
                            logger.info( "{} [{}] into [{}]{}".format(
                                    action,
                                    as_info( location ),
                                    as_info( local_dir_with_sub_dir ),
                                    attempt > 1 and "(attempt {})".format( str(attempt) ) or ""
                            ) )
                            try:
                                vcs_backend.obtain( local_dir_with_sub_dir )
                                logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                                break
                            except pip_exceptions.PipError as error:
                                attempt = attempt + 1
                                log_as = logger.warn
                                if attempt > max_attempts:
                                    log_as = logger.error

                                log_as( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                        as_info( location ),
                                        as_notice( local_dir_with_sub_dir ),
                                        ( rev_options and  " to {}".format( as_notice(  str(rev_options) ) ) or ""),
                                        as_error( str(error) )
                                ) )
                                if attempt > max_attempts:
                                    raise LocationException( str(error) )

                logger.debug( "(url path) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(url path) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            return local_directory