def get_local_directory_for_download_url(self, location, sub_dir, local_directory):
    """Resolve the local directory for an archive download URL.

    Reuses a previously-extracted copy if present, otherwise extracts a
    cached archive, otherwise downloads (caching the archive afterwards
    when a cache_root is configured).

    Args:
        location: the archive download URL.
        sub_dir: optional sub-directory under local_directory to extract into.
        local_directory: base directory this location maps to.

    Returns:
        local_directory (contents are extracted beneath it).

    Raises:
        LocationException: if the download is truncated.
    """
    logger.debug("[{}] is an archive download".format(as_info(location)))

    local_dir_with_sub_dir = os.path.join(local_directory, sub_dir or "")

    # First we check to see if we already downloaded and extracted this archive before
    if os.path.exists(local_dir_with_sub_dir):
        try:
            # rmdir only succeeds on an empty directory...
            os.rmdir(local_dir_with_sub_dir)
        except OSError:
            # ...so failure means it is populated: reuse it as the local_directory
            logger.debug("(already present) Location = [{}]".format(
                as_info(location)))
            logger.debug("(already present) Local folder = [{}]".format(
                as_info(str(self._local_folder))))
            return local_directory

    # Nothing should be fetched when only dumping configuration or cleaning
    if self._cuppa_env['dump'] or self._cuppa_env['clean']:
        return local_directory

    # If not we then check to see if we cached the download
    cached_archive = self.get_cached_archive(self._cuppa_env['cache_root'],
                                             self._local_folder)
    if cached_archive:
        logger.debug("Cached archive [{}] found for [{}]".format(
            as_info(cached_archive), as_info(location)))
        self.extract(cached_archive, local_dir_with_sub_dir)
    else:
        logger.info("Downloading [{}]...".format(as_info(location)))
        try:
            report_hook = None
            # Only pay for progress reporting when it will be shown
            if logger.isEnabledFor(logging.INFO):
                report_hook = ReportDownloadProgress()
            filename, headers = urlretrieve(location, reporthook=report_hook)
            name, extension = os.path.splitext(filename)
            logger.info("[{}] successfully downloaded to [{}]".format(
                as_info(location), as_info(filename)))
            self.extract(filename, local_dir_with_sub_dir)
            if self._cuppa_env['cache_root']:
                cached_archive = os.path.join(
                    self._cuppa_env['cache_root'], self._local_folder)
                logger.debug("Caching downloaded file as [{}]".format(
                    as_info(cached_archive)))
                shutil.copyfile(filename, cached_archive)
        except ContentTooShortError as error:
            logger.error("Download of [{}] failed with error [{}]".format(
                as_error(location), as_error(str(error))))
            raise LocationException(error)

    return local_directory
def log_exception( error, suppress=None ):
    # Log a top-level, terminating exception.  When `suppress` is falsy a
    # fatal message is emitted (plus a hint about the verbosity needed to
    # surface the stack); the stack trace itself is always logged at the
    # custom EXCEPTION level (defined by cuppa.log, not stdlib logging).
    # NOTE(review): nesting reconstructed from a whitespace-mangled source
    # line - confirm against the original file.
    from cuppa.log import logger
    from cuppa.colourise import as_info
    if not suppress:
        logger.fatal( "Cuppa terminated by exception [{}: {}]".format(
                as_info( error.__class__.__name__ ),
                as_info( str(error) )
        ) )
        # Tell the user how to raise verbosity to see the full stack
        if not logger.isEnabledFor( logging.EXCEPTION ):
            logger.warn( "Use {} (or above) to see the stack".format( as_info( "--verbosity=exception" ) ) )
    logger.exception( traceback.format_exc() )
def __call_classmethod_for_classes_in_module(package, name, path, method, *args, **kwargs):
    # Import module `name` (under optional `package`, from `path`) and invoke
    # the classmethod `method` on every class it defines, recursing into any
    # nested modules.  Modules that fail to import are silently skipped.
    # NOTE(review): nesting reconstructed from a whitespace-mangled source
    # line - confirm against the original file.
    try:
        filehandle, pathname, description = imp.find_module(
            name, path and [path] or None)
        try:
            try:
                # Reuse an already-imported module when possible
                qualified_name = package and package + "." + name or name
                module = sys.modules[qualified_name]
            except KeyError as error:
                module = imp.load_module(name, filehandle, pathname, description)
            for member_name in dir(module):
                member = getattr(module, member_name)
                if inspect.ismodule(member):
                    # Recurse into sub-modules, extending the package path
                    if package:
                        parent_package = package + "." + name
                    else:
                        parent_package = name
                    __call_classmethod_for_classes_in_module(
                        parent_package, member_name, pathname, method,
                        *args, **kwargs)
                elif inspect.isclass(member):
                    try:
                        function = getattr(member, method)
                        if callable(function):
                            try:
                                function(*args, **kwargs)
                            except Exception as error:
                                # Only print the stack at EXCEPTION verbosity,
                                # but always propagate the failure
                                if logger.isEnabledFor(logging.EXCEPTION):
                                    logger.error(
                                        "[{}] in [{}] failed with error [{}]".
                                        format(as_info(str(method)),
                                               as_notice(str(member)),
                                               as_info(str(error))))
                                    traceback.print_exc()
                                raise error
                    except AttributeError as ignore:
                        # Class does not provide `method` - nothing to call
                        pass
        finally:
            # imp.find_module returns an open file handle we must close
            if filehandle:
                filehandle.close()
    except ImportError as error:
        # Module could not be found/imported - treat as "nothing to do"
        pass
def run(*args, **kwargs):
    """Entry point called from a sconstruct: resolve the calling file's path,
    initialise logging, and hand off to cuppa.core.run(), logging any
    exception that escapes."""
    from inspect import getframeinfo, stack

    # The sconstruct path is taken from the caller's stack frame
    frame_info = getframeinfo(stack()[1][0])
    sconsctruct_path = frame_info.filename

    import traceback
    import logging
    from cuppa.log import logger, initialise_logging
    from cuppa.colourise import as_info

    initialise_logging()
    try:
        import cuppa.core
        cuppa.core.run(sconsctruct_path, *args, **kwargs)
    except Exception as error:
        error_name = error.__class__.__name__
        logger.error("Cuppa terminated by exception [{}: {}]".format(
            as_info(error_name), as_info(str(error))))
        # Point the user at the verbosity level that shows the stack
        if not logger.isEnabledFor(logging.EXCEPTION):
            logger.error("Use {} (or above) to see the stack".format(
                as_info("--verbosity=exception")))
        logger.exception(traceback.format_exc())
def progress_action( label, event, sconscript, variant, env ):
    """Build an SCons Action wrapping a Progress callable; a human-readable
    description is constructed only when INFO logging is enabled."""
    progress = Progress( event, sconscript, variant, env )
    description = None
    if logger.isEnabledFor( logging.INFO ):
        # Labels starting with '#' are bare stage markers; otherwise the
        # description names either the variant or the sconscript
        if label.startswith( "#" ):
            stage, name = as_notice( label[1:] ), ""
        elif not variant:
            stage = as_notice( label ) + " sconscript: ["
            name = as_notice( sconscript ) + "]"
        else:
            stage = as_notice( label ) + " variant: ["
            name = as_info( variant ) + "]"
        description = "Progress( {}{} )".format( stage, name )
    return Action( progress, description )
def progress_action(label, event, sconscript, variant, env):
    """Return an SCons Action that reports build progress.

    The description string is only assembled when INFO-level logging is
    active; otherwise the Action carries no description.
    """
    progress = Progress(event, sconscript, variant, env)
    if not logger.isEnabledFor(logging.INFO):
        return Action(progress, None)
    stage = ""
    name = ""
    if label.startswith("#"):
        # Bare stage marker - strip the leading '#'
        stage = as_notice(label[1:])
    elif variant:
        stage = as_notice(label) + " variant: ["
        name = as_info(variant) + "]"
    else:
        stage = as_notice(label) + " sconscript: ["
        name = as_notice(sconscript) + "]"
    return Action(progress, "Progress( {}{} )".format(stage, name))
def get_local_directory(self, cuppa_env, location, sub_dir, branch, full_url):
    """Map `location` (local path, archive, download URL or vcs+url) to a
    local directory, fetching/extracting/cloning/updating as required.

    Side effects: sets self._local_folder; may download, extract, clone or
    update working copies under cuppa_env['download_root'].
    Returns the resolved local directory path.
    NOTE(review): indentation reconstructed from a whitespace-mangled
    source line - confirm nesting against the original file.
    """
    offline = cuppa_env['offline']
    local_directory = None

    base = cuppa_env['download_root']
    if not os.path.isabs(base):
        base = os.path.join(cuppa_env['working_dir'], base)

    if location.startswith('file:'):
        location = pip_download.url_to_path(location)

    if not pip_is_url(location):
        if pip_is_archive_file(location):
            # Local archive file: extract it under the download root
            self._local_folder = self.folder_name_from_path(
                location, cuppa_env)
            local_directory = os.path.join(base, self._local_folder)
            local_dir_with_sub_dir = os.path.join(
                local_directory, sub_dir and sub_dir or "")
            if os.path.exists(local_dir_with_sub_dir):
                try:
                    # rmdir only succeeds on an empty directory
                    os.rmdir(local_dir_with_sub_dir)
                except:
                    # Non-empty: already extracted, reuse as-is
                    return local_directory
            self.extract(location, local_dir_with_sub_dir)
            logger.debug("(local archive) Location = [{}]".format(
                as_info(location)))
            logger.debug("(local archive) Local folder = [{}]".format(
                as_info(self._local_folder)))
        else:
            # Plain local path (optionally with a branch sub-directory)
            local_directory = branch and os.path.join(location, branch) or location
            self._local_folder = self.folder_name_from_path(
                location, cuppa_env)
            logger.debug("(local file) Location = [{}]".format(
                as_info(location)))
            logger.debug("(local file) Local folder = [{}]".format(
                as_info(self._local_folder)))
        return local_directory
    else:
        self._local_folder = self.folder_name_from_path(
            full_url, cuppa_env)
        local_directory = os.path.join(base, self._local_folder)

        if full_url.scheme.startswith(
                'http') and self.url_is_download_archive_url(
                    full_url.path):
            logger.debug("[{}] is an archive download".format(
                as_info(location)))

            local_dir_with_sub_dir = os.path.join(
                local_directory, sub_dir and sub_dir or "")

            # First we check to see if we already downloaded and extracted this archive before
            if os.path.exists(local_dir_with_sub_dir):
                try:
                    # If not empty this will fail
                    os.rmdir(local_dir_with_sub_dir)
                except:
                    # Not empty so we'll return this as the local_directory
                    logger.debug(
                        "(already present) Location = [{}]".format(
                            as_info(location)))
                    logger.debug(
                        "(already present) Local folder = [{}]".format(
                            as_info(str(self._local_folder))))
                    return local_directory

            # Never fetch when only dumping configuration or cleaning
            if cuppa_env['dump'] or cuppa_env['clean']:
                return local_directory

            # If not we then check to see if we cached the download
            cached_archive = self.get_cached_archive(
                cuppa_env['cache_root'], self._local_folder)
            if cached_archive:
                logger.debug("Cached archive [{}] found for [{}]".format(
                    as_info(cached_archive), as_info(location)))
                self.extract(cached_archive, local_dir_with_sub_dir)
            else:
                logger.info("Downloading [{}]...".format(
                    as_info(location)))
                try:
                    report_hook = None
                    # Only report progress when it will actually be shown
                    if logger.isEnabledFor(logging.INFO):
                        report_hook = ReportDownloadProgress()
                    filename, headers = urlretrieve(location,
                                                    reporthook=report_hook)
                    name, extension = os.path.splitext(filename)
                    logger.info(
                        "[{}] successfully downloaded to [{}]".format(
                            as_info(location), as_info(filename)))
                    self.extract(filename, local_dir_with_sub_dir)
                    # Cache the downloaded archive for future runs
                    if cuppa_env['cache_root']:
                        cached_archive = os.path.join(
                            cuppa_env['cache_root'], self._local_folder)
                        logger.debug(
                            "Caching downloaded file as [{}]".format(
                                as_info(cached_archive)))
                        shutil.copyfile(filename, cached_archive)
                except ContentTooShortError as error:
                    logger.error(
                        "Download of [{}] failed with error [{}]".format(
                            as_error(location), as_error(str(error))))
                    raise LocationException(error)

        elif '+' in full_url.scheme:
            # Version-control URL of the form "<vcs>+<url>"
            vc_type = location.split('+', 1)[0]
            backend = pip_vcs.vcs.get_backend(vc_type)
            if backend:
                try:
                    vcs_backend = backend(self.expand_secret(location))
                except:
                    # Pip version >= 19
                    backend.url = self.expand_secret(location)
                    vcs_backend = backend
                local_dir_with_sub_dir = os.path.join(
                    local_directory, sub_dir and sub_dir or "")

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                if os.path.exists(local_directory):
                    # Existing checkout: update it (unless offline).
                    # NOTE: `branch` is rebound from get_info() here.
                    url, repository, branch, remote, revision = self.get_info(
                        location, local_dir_with_sub_dir, full_url, vc_type)
                    rev_options = self.get_rev_options(vc_type, vcs_backend,
                                                       local_remote=remote)
                    version = self.ver_rev_summary(branch, revision,
                                                   self._full_url.path)[0]
                    if not offline:
                        logger.info(
                            "Updating [{}] in [{}]{} at [{}]".format(
                                as_info(location),
                                as_notice(local_dir_with_sub_dir),
                                (rev_options and " on {}".format(
                                    as_notice(str(rev_options))) or ""),
                                as_info(version)))
                        try:
                            update(vcs_backend, local_dir_with_sub_dir,
                                   rev_options)
                            logger.debug(
                                "Successfully updated [{}]".format(
                                    as_info(location)))
                        except pip_exceptions.PipError as error:
                            # A failed update is non-fatal; keep the old copy
                            logger.warn(
                                "Could not update [{}] in [{}]{} due to error [{}]"
                                .format(as_warning(location),
                                        as_warning(local_dir_with_sub_dir),
                                        (rev_options and " at {}".format(
                                            as_warning(str(rev_options))) or ""),
                                        as_warning(str(error))))
                    else:
                        logger.debug(
                            "Skipping update for [{}] as running in offline mode"
                            .format(as_info(location)))
                else:
                    # Fresh checkout/clone with one retry attempt
                    rev_options = self.get_rev_options(
                        vc_type, vcs_backend)
                    action = "Cloning"
                    if vc_type == "svn":
                        action = "Checking out"
                    max_attempts = 2
                    attempt = 1
                    while attempt <= max_attempts:
                        logger.info("{} [{}] into [{}]{}".format(
                            action, as_info(location),
                            as_info(local_dir_with_sub_dir),
                            attempt > 1 and "(attempt {})".format(str(attempt)) or ""))
                        try:
                            obtain(vcs_backend, local_dir_with_sub_dir,
                                   vcs_backend.url)
                            logger.debug(
                                "Successfully retrieved [{}]".format(
                                    as_info(location)))
                            break
                        except pip_exceptions.PipError as error:
                            attempt = attempt + 1
                            # Escalate to error (and raise) on the last attempt
                            log_as = logger.warn
                            if attempt > max_attempts:
                                log_as = logger.error
                            log_as(
                                "Could not retrieve [{}] into [{}]{} due to error [{}]"
                                .format(as_info(location),
                                        as_notice(local_dir_with_sub_dir),
                                        (rev_options and " to {}".format(
                                            as_notice(str(rev_options))) or ""),
                                        as_error(str(error))))
                            if attempt > max_attempts:
                                raise LocationException(str(error))

        logger.debug("(url path) Location = [{}]".format(
            as_info(location)))
        logger.debug("(url path) Local folder = [{}]".format(
            as_info(self._local_folder)))

        return local_directory
def Popen2( cls, stdout_processor, stderr_processor, args_list, **kwargs ):
    # Run `args_list` as a subprocess, streaming stdout through
    # `stdout_processor` (on this thread) and stderr through
    # `stderr_processor` (on a helper thread).  Returns the process's
    # return code.  Extra kwargs are passed to subprocess.Popen, except:
    #   suppress_output - when falsy, the command line is echoed first
    #   scons_env       - consulted for the 'use-shell' option
    # NOTE(review): nesting reconstructed from a whitespace-mangled source
    # line - confirm against the original file.
    kwargs['stdout'] = subprocess.PIPE
    kwargs['stderr'] = subprocess.PIPE

    timing_enabled = logger.isEnabledFor( logging.DEBUG )

    suppress_output = False
    if 'suppress_output' in kwargs:
        suppress_output = kwargs['suppress_output']
        del kwargs['suppress_output']

    use_shell = False
    if 'scons_env' in kwargs:
        use_shell = kwargs['scons_env'].get_option( 'use-shell' )
        del kwargs['scons_env']

    try:
        process = None
        stderr_thread = None

        # Timing is only measured when DEBUG logging is on
        timer = timing_enabled and cuppa.timer.Timer() or None
        if timer:
            logger.debug( "Command [{}] - Running...".format( as_notice(str(timer.timer_id())) ) )

        # close_fds is not supported with pipes on Windows
        close_fds = platform.system() == "Windows" and False or True

        if not suppress_output:
            sys.stdout.write( " ".join(args_list) + "\n" )

        # With a shell the command must be a single string, otherwise a list
        process = subprocess.Popen(
            use_shell and " ".join(args_list) or args_list,
            **dict( kwargs, close_fds=close_fds, shell=use_shell, universal_newlines=True )
        )

        stderr_consumer = LineConsumer( process.stderr.readline, stderr_processor )
        stdout_consumer = LineConsumer( process.stdout.readline, stdout_processor )

        # stderr is drained on a helper thread while stdout is drained here,
        # so neither pipe can fill up and deadlock the child
        stderr_thread = threading.Thread( target=stderr_consumer )
        stderr_thread.start()
        stdout_consumer();
        stderr_thread.join()

        process.wait()

        if timer:
            timer.stop()
            logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )

        return process.returncode

    except Exception as e:
        if timer:
            timer.stop()
            logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )
        logger.error( "IncrementalSubProcess.Popen2() failed with error [{}]".format( str(e) ) )
        # Best-effort cleanup of the child process and reader thread
        if process:
            logger.info( "Killing existing POpen object" )
            process.kill()
        if stderr_thread:
            logger.info( "Joining any running threads" )
            stderr_thread.join()
        raise e
def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):
    # Older-pip variant: map `location` to a local directory, extracting
    # archives, downloading URLs or cloning/updating vcs checkouts.
    # Returns (local_directory, use_sub_dir) where the second element is
    # True only for remote (URL) locations.
    # NOTE(review): indentation reconstructed from a whitespace-mangled
    # source line - confirm nesting against the original file.
    local_directory = None

    base = cuppa_env['download_root']
    if not os.path.isabs( base ):
        base = os.path.join( cuppa_env['working_dir'], base )

    if location.startswith( 'file:' ):
        location = pip.download.url_to_path( location )

    if not pip.download.is_url( location ):
        if pip.download.is_archive_file( location ):
            # Local archive: extract beneath the download root
            local_folder = self.folder_name_from_path( location )
            local_directory = os.path.join( base, local_folder )
            if os.path.exists( local_directory ):
                try:
                    # rmdir only succeeds on an empty directory
                    os.rmdir( local_directory )
                except:
                    # Non-empty: already extracted, reuse as-is
                    return local_directory, False
            self.extract( location, local_directory )
        else:
            # Plain local path (optionally with a branch sub-directory)
            local_directory = branch and os.path.join( location, branch ) or location
        return local_directory, False
    else:
        local_folder = self.folder_name_from_path( full_url )
        local_directory = os.path.join( base, local_folder )

        if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
            logger.debug( "[{}] is an archive download".format( as_info( location ) ) )

            local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

            # First we check to see if we already downloaded and extracted this archive before
            if os.path.exists( local_dir_with_sub_dir ):
                try:
                    # If not empty this will fail
                    os.rmdir( local_dir_with_sub_dir )
                except:
                    # Not empty so we'll return this as the local_directory
                    return local_directory, True

            # If not we then check to see if we cached the download
            cached_archive = self.get_cached_archive( cuppa_env['cache_root'], local_folder )
            if cached_archive:
                logger.debug( "Cached archive [{}] found for [{}]".format(
                        as_info( cached_archive ), as_info( location ) ) )
                self.extract( cached_archive, local_dir_with_sub_dir )
            else:
                logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                try:
                    report_hook = None
                    # Only report progress when it will actually be shown
                    if logger.isEnabledFor( logging.INFO ):
                        report_hook = ReportDownloadProgress()
                    filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                    name, extension = os.path.splitext( filename )
                    logger.info( "[{}] successfully downloaded to [{}]".format(
                            as_info( location ), as_info( filename ) ) )
                    self.extract( filename, local_dir_with_sub_dir )
                    # Cache the downloaded archive for future runs
                    if cuppa_env['cache_root']:
                        cached_archive = os.path.join( cuppa_env['cache_root'], local_folder )
                        logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                        shutil.copyfile( filename, cached_archive )
                except urllib.ContentTooShortError as error:
                    logger.error( "Download of [{}] failed with error [{}]".format(
                            as_error( location ), as_error( str(error) ) ) )
                    raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

        elif '+' in full_url.scheme:
            # Version-control URL of the form "<vcs>+<url>"
            vc_type = location.split('+', 1)[0]
            backend = pip.vcs.vcs.get_backend( vc_type )
            if backend:
                vcs_backend = backend( location )
                rev_options = self.get_rev_options( vc_type, vcs_backend )
                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )
                if os.path.exists( local_directory ):
                    # Existing checkout: update in place.
                    # NOTE: `branch` is rebound from get_info() here.
                    url, repository, branch, revision = self.get_info( location, local_dir_with_sub_dir, full_url )
                    version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                    logger.debug( "Updating [{}] in [{}]{} at [{}]".format(
                            as_info( location ),
                            as_notice( local_dir_with_sub_dir ),
                            ( rev_options and " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                            as_info( version ) ) )
                    try:
                        vcs_backend.update( local_dir_with_sub_dir, rev_options )
                        logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                    except pip.exceptions.InstallationError as error:
                        # A failed update is non-fatal; keep the old copy
                        logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                as_warning( location ),
                                as_warning( local_dir_with_sub_dir ),
                                ( rev_options and " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                as_warning( str(error) ) ) )
                else:
                    # Fresh checkout/clone
                    action = "Cloning"
                    if vc_type == "svn":
                        action = "Checking out"
                    logger.info( "{} [{}] into [{}]".format(
                            action, as_info( location ), as_info( local_dir_with_sub_dir ) ) )
                    try:
                        vcs_backend.obtain( local_dir_with_sub_dir )
                        logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                    except pip.exceptions.InstallationError as error:
                        logger.error( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                as_error( location ),
                                as_error( local_dir_with_sub_dir ),
                                ( rev_options and " to {}".format( as_error( str(rev_options) ) ) or ""),
                                as_error( str( error ) ) ) )
                        raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

        return local_directory, True
def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):
    # Map `location` (local path, archive, download URL or vcs+url) to a
    # local directory, downloading/extracting/cloning/updating as needed.
    # Side effects: sets self._local_folder; honours cuppa_env's 'offline',
    # 'dump' and 'clean' flags.  Returns the resolved local directory.
    # NOTE(review): indentation reconstructed from a whitespace-mangled
    # source line - confirm nesting against the original file.
    offline = cuppa_env['offline']
    local_directory = None

    base = cuppa_env['download_root']
    if not os.path.isabs( base ):
        base = os.path.join( cuppa_env['working_dir'], base )

    if location.startswith( 'file:' ):
        location = pip_download.url_to_path( location )

    if not pip_download.is_url( location ):
        if pip_download.is_archive_file( location ):
            # Local archive: extract beneath the download root
            self._local_folder = self.folder_name_from_path( location, cuppa_env )
            local_directory = os.path.join( base, self._local_folder )
            local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )
            if os.path.exists( local_dir_with_sub_dir ):
                try:
                    # rmdir only succeeds on an empty directory
                    os.rmdir( local_dir_with_sub_dir )
                except:
                    # Non-empty: already extracted, reuse as-is
                    return local_directory
            self.extract( location, local_dir_with_sub_dir )
            logger.debug( "(local archive) Location = [{}]".format( as_info( location ) ) )
            logger.debug( "(local archive) Local folder = [{}]".format( as_info( self._local_folder ) ) )
        else:
            # Plain local path (optionally with a branch sub-directory)
            local_directory = branch and os.path.join( location, branch ) or location
            self._local_folder = self.folder_name_from_path( location, cuppa_env )
            logger.debug( "(local file) Location = [{}]".format( as_info( location ) ) )
            logger.debug( "(local file) Local folder = [{}]".format( as_info( self._local_folder ) ) )
        return local_directory
    else:
        self._local_folder = self.folder_name_from_path( full_url, cuppa_env )
        local_directory = os.path.join( base, self._local_folder )

        if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
            logger.debug( "[{}] is an archive download".format( as_info( location ) ) )

            local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

            # First we check to see if we already downloaded and extracted this archive before
            if os.path.exists( local_dir_with_sub_dir ):
                try:
                    # If not empty this will fail
                    os.rmdir( local_dir_with_sub_dir )
                except:
                    # Not empty so we'll return this as the local_directory
                    logger.debug( "(already present) Location = [{}]".format( as_info( location ) ) )
                    logger.debug( "(already present) Local folder = [{}]".format( as_info( str(self._local_folder) ) ) )
                    return local_directory

            # Never fetch when only dumping configuration or cleaning
            if cuppa_env['dump'] or cuppa_env['clean']:
                return local_directory

            # If not we then check to see if we cached the download
            cached_archive = self.get_cached_archive( cuppa_env['cache_root'], self._local_folder )
            if cached_archive:
                logger.debug( "Cached archive [{}] found for [{}]".format(
                        as_info( cached_archive ), as_info( location ) ) )
                self.extract( cached_archive, local_dir_with_sub_dir )
            else:
                logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                try:
                    report_hook = None
                    # Only report progress when it will actually be shown
                    if logger.isEnabledFor( logging.INFO ):
                        report_hook = ReportDownloadProgress()
                    filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                    name, extension = os.path.splitext( filename )
                    logger.info( "[{}] successfully downloaded to [{}]".format(
                            as_info( location ), as_info( filename ) ) )
                    self.extract( filename, local_dir_with_sub_dir )
                    # Cache the downloaded archive for future runs
                    if cuppa_env['cache_root']:
                        cached_archive = os.path.join( cuppa_env['cache_root'], self._local_folder )
                        logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                        shutil.copyfile( filename, cached_archive )
                except urllib.ContentTooShortError as error:
                    logger.error( "Download of [{}] failed with error [{}]".format(
                            as_error( location ), as_error( str(error) ) ) )
                    raise LocationException( error )

        elif '+' in full_url.scheme:
            # Version-control URL of the form "<vcs>+<url>"
            vc_type = location.split('+', 1)[0]
            backend = pip_vcs.vcs.get_backend( vc_type )
            if backend:
                vcs_backend = backend( self.expand_secret( location ) )
                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                if os.path.exists( local_directory ):
                    # Existing checkout: update it (unless offline).
                    # NOTE: `branch` is rebound from get_info() here.
                    url, repository, branch, remote, revision = self.get_info( location, local_dir_with_sub_dir, full_url, vc_type )
                    rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
                    version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                    if not offline:
                        logger.info( "Updating [{}] in [{}]{} at [{}]".format(
                                as_info( location ),
                                as_notice( local_dir_with_sub_dir ),
                                ( rev_options and " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                                as_info( version ) ) )
                        try:
                            update( vcs_backend, local_dir_with_sub_dir, rev_options )
                            logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                        except pip_exceptions.PipError as error:
                            # A failed update is non-fatal; keep the old copy
                            logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                    as_warning( location ),
                                    as_warning( local_dir_with_sub_dir ),
                                    ( rev_options and " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                    as_warning( str(error) ) ) )
                    else:
                        logger.debug( "Skipping update for [{}] as running in offline mode".format( as_info( location ) ) )
                else:
                    # Fresh checkout/clone with one retry attempt
                    rev_options = self.get_rev_options( vc_type, vcs_backend )
                    action = "Cloning"
                    if vc_type == "svn":
                        action = "Checking out"
                    max_attempts = 2
                    attempt = 1
                    while attempt <= max_attempts:
                        logger.info( "{} [{}] into [{}]{}".format(
                                action,
                                as_info( location ),
                                as_info( local_dir_with_sub_dir ),
                                attempt > 1 and "(attempt {})".format( str(attempt) ) or "" ) )
                        try:
                            vcs_backend.obtain( local_dir_with_sub_dir )
                            logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                            break
                        except pip_exceptions.PipError as error:
                            attempt = attempt + 1
                            # Escalate to error (and raise) on the last attempt
                            log_as = logger.warn
                            if attempt > max_attempts:
                                log_as = logger.error
                            log_as( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                    as_info( location ),
                                    as_notice( local_dir_with_sub_dir ),
                                    ( rev_options and " to {}".format( as_notice( str(rev_options) ) ) or ""),
                                    as_error( str(error) ) ) )
                            if attempt > max_attempts:
                                raise LocationException( str(error) )

        logger.debug( "(url path) Location = [{}]".format( as_info( location ) ) )
        logger.debug( "(url path) Local folder = [{}]".format( as_info( self._local_folder ) ) )

        return local_directory
def Popen2( cls, stdout_processor, stderr_processor, args_list, **kwargs ):
    # Variant of Popen2 that temporarily routes sys.stdout/sys.stderr through
    # colorama's wrapped, auto-flushing streams while the subprocess runs.
    # stdout is consumed on this thread, stderr on a helper thread; returns
    # the process's return code.  Extra kwargs are passed to subprocess.Popen,
    # except 'suppress_output' and 'scons_env' (consulted for 'use-shell').
    # NOTE(review): nesting reconstructed from a whitespace-mangled source
    # line - confirm against the original file.
    kwargs['stdout'] = subprocess.PIPE
    kwargs['stderr'] = subprocess.PIPE

    timing_enabled = logger.isEnabledFor( logging.DEBUG )

    suppress_output = False
    if 'suppress_output' in kwargs:
        suppress_output = kwargs['suppress_output']
        del kwargs['suppress_output']

    use_shell = False
    if 'scons_env' in kwargs:
        use_shell = kwargs['scons_env'].get_option( 'use-shell' )
        del kwargs['scons_env']

    orig_stdout = sys.stdout
    orig_stderr = sys.stderr

    try:
        # TODO: Review this as it might be needed for Windows otherwise replace
        # the wrapped values with orig_stdout and orig_stderr respectively
        sys.stdout = AutoFlushFile( colorama.initialise.wrapped_stdout )
        sys.stderr = AutoFlushFile( colorama.initialise.wrapped_stderr )

        process = None
        stderr_thread = None

        # Timing is only measured when DEBUG logging is on
        timer = timing_enabled and cuppa.timer.Timer() or None
        if timer:
            logger.debug( "Command [{}] - Running...".format( as_notice(str(timer.timer_id())) ) )

        # close_fds is not supported with pipes on Windows
        close_fds = platform.system() == "Windows" and False or True

        if not suppress_output:
            sys.stdout.write( " ".join(args_list) + "\n" )

        # With a shell the command must be a single string, otherwise a list
        process = subprocess.Popen(
            use_shell and " ".join(args_list) or args_list,
            **dict( kwargs, close_fds=close_fds, shell=use_shell )
        )

        stderr_consumer = LineConsumer( process.stderr.readline, stderr_processor )
        stdout_consumer = LineConsumer( process.stdout.readline, stdout_processor )

        # Drain stderr on a helper thread while stdout is drained here,
        # so neither pipe can fill up and deadlock the child
        stderr_thread = threading.Thread( target=stderr_consumer )
        stderr_thread.start()
        stdout_consumer();
        stderr_thread.join()

        process.wait()

        if timer:
            timer.stop()
            logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )

        return process.returncode

    except Exception as e:
        if timer:
            timer.stop()
            logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )
        logger.error( "IncrementalSubProcess.Popen2() failed with error [{}]".format( str(e) ) )
        # Best-effort cleanup of the child process and reader thread
        if process:
            logger.info( "Killing existing POpen object" )
            process.kill()
        if stderr_thread:
            logger.info( "Joining any running threads" )
            stderr_thread.join()
        raise e
    finally:
        # Always restore the original streams
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
def Popen2(cls, stdout_processor, stderr_processor, args_list, **kwargs):
    # PEP8-style variant of Popen2: run `args_list` with sys.stdout/sys.stderr
    # temporarily routed through colorama's wrapped auto-flushing streams,
    # feeding stdout lines to `stdout_processor` (this thread) and stderr
    # lines to `stderr_processor` (helper thread).  Returns the return code.
    # Recognised non-Popen kwargs: 'suppress_output', 'scons_env' ('use-shell').
    # NOTE(review): nesting reconstructed from a whitespace-mangled source
    # line - confirm against the original file.
    kwargs['stdout'] = subprocess.PIPE
    kwargs['stderr'] = subprocess.PIPE

    timing_enabled = logger.isEnabledFor(logging.DEBUG)

    suppress_output = False
    if 'suppress_output' in kwargs:
        suppress_output = kwargs['suppress_output']
        del kwargs['suppress_output']

    use_shell = False
    if 'scons_env' in kwargs:
        use_shell = kwargs['scons_env'].get_option('use-shell')
        del kwargs['scons_env']

    orig_stdout = sys.stdout
    orig_stderr = sys.stderr

    try:
        # TODO: Review this as it might be needed for Windows otherwise replace
        # the wrapped values with orig_stdout and orig_stderr respectively
        sys.stdout = AutoFlushFile(colorama.initialise.wrapped_stdout)
        sys.stderr = AutoFlushFile(colorama.initialise.wrapped_stderr)

        process = None
        stderr_thread = None

        # Timing is only measured when DEBUG logging is on
        timer = timing_enabled and cuppa.timer.Timer() or None
        if timer:
            logger.debug("Command [{}] - Running...".format(
                as_notice(str(timer.timer_id()))))

        # close_fds is not supported with pipes on Windows
        close_fds = platform.system() == "Windows" and False or True

        if not suppress_output:
            sys.stdout.write(" ".join(args_list) + "\n")

        # With a shell the command must be a single string, otherwise a list
        process = subprocess.Popen(
            use_shell and " ".join(args_list) or args_list,
            **dict(kwargs, close_fds=close_fds, shell=use_shell))

        stderr_consumer = LineConsumer(process.stderr.readline,
                                       stderr_processor)
        stdout_consumer = LineConsumer(process.stdout.readline,
                                       stdout_processor)

        # Drain stderr on a helper thread while stdout is drained here,
        # so neither pipe can fill up and deadlock the child
        stderr_thread = threading.Thread(target=stderr_consumer)
        stderr_thread.start()
        stdout_consumer()
        stderr_thread.join()

        process.wait()

        if timer:
            timer.stop()
            logger.debug("Command [{}] - Elapsed {}".format(
                as_notice(str(timer.timer_id())),
                cuppa.timer.as_string(timer.elapsed())))

        return process.returncode

    except Exception as e:
        if timer:
            timer.stop()
            logger.debug("Command [{}] - Elapsed {}".format(
                as_notice(str(timer.timer_id())),
                cuppa.timer.as_string(timer.elapsed())))
        logger.error(
            "IncrementalSubProcess.Popen2() failed with error [{}]".format(
                str(e)))
        # Best-effort cleanup of the child process and reader thread
        if process:
            logger.info("Killing existing POpen object")
            process.kill()
        if stderr_thread:
            logger.info("Joining any running threads")
            stderr_thread.join()
        raise e
    finally:
        # Always restore the original streams
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr