def _run_command(self, source, suppress_output, program_path, command, working_dir, env, retry):
    log_failure = retry and logger.warn or logger.error
    success = False
    monitor = Monitor(program_path, env)
    monitor.start()
    try:
        return_code = self._run(suppress_output, program_path, command, working_dir, env)
        if return_code == self._expected_exit_code:
            monitor.stop(status='success')
            success = True
        elif return_code < 0:
            self.__write_file_to_stderr(stderr_file_name_from(program_path))
            log_failure("Command was terminated by signal: {}".format(
                as_error(str(return_code))))
            monitor.stop(status='aborted', treat_error_as_warning=retry)
        elif return_code > 0:
            self.__write_file_to_stderr(stderr_file_name_from(program_path))
            log_failure("Command returned with error code: {}".format(
                as_error(str(return_code))))
            monitor.stop(status='failed', treat_error_as_warning=retry)
        else:
            monitor.stop(status='success')
            success = True
        if return_code == self._expected_exit_code:
            self._write_success_file(success_file_name_from(program_path))
        elif return_code:
            self._remove_success_file(success_file_name_from(program_path))
            if not retry:
                if return_code < 0:
                    raise BuildError(
                        node=source and source[0] or None,
                        errstr="Command was terminated by signal: {}".format(
                            str(-return_code)))
                else:
                    raise BuildError(
                        node=source and source[0] or None,
                        errstr="Command returned with error code: {}".format(
                            str(return_code)))
        else:
            self._write_success_file(success_file_name_from(program_path))
        return success
    except OSError as e:
        log_failure("Execution of [{}] failed with error: {}".format(
            as_notice(command), as_notice(str(e))))
        monitor.stop(status='failed', treat_error_as_warning=retry)
        if not retry:
            raise BuildError(e)
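# --- Illustrative sketch (not part of the original source) ---
# The sign convention handled above follows subprocess semantics: a negative
# return code means the child was killed by a signal, a positive one is a
# normal non-zero exit. POSIX-only demonstration, stdlib only; the shell
# commands are arbitrary examples.
import subprocess

rc = subprocess.call(["/bin/sh", "-c", "exit 3"])
assert rc == 3                 # "Command returned with error code: 3"

rc = subprocess.call(["/bin/sh", "-c", "kill -TERM $$"])
assert rc == -15               # "Command was terminated by signal" (SIGTERM)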
def get_local_directory_for_download_url(self, location, sub_dir, local_directory):
    logger.debug("[{}] is an archive download".format(as_info(location)))
    local_dir_with_sub_dir = os.path.join(local_directory, sub_dir and sub_dir or "")

    # First we check to see if we already downloaded and extracted this archive before
    if os.path.exists(local_dir_with_sub_dir):
        try:
            # If not empty this will fail
            os.rmdir(local_dir_with_sub_dir)
        except:
            # Not empty so we'll return this as the local_directory
            logger.debug("(already present) Location = [{}]".format(as_info(location)))
            logger.debug("(already present) Local folder = [{}]".format(
                as_info(str(self._local_folder))))
            return local_directory

    if self._cuppa_env['dump'] or self._cuppa_env['clean']:
        return local_directory

    # If not we then check to see if we cached the download
    cached_archive = self.get_cached_archive(self._cuppa_env['cache_root'], self._local_folder)
    if cached_archive:
        logger.debug("Cached archive [{}] found for [{}]".format(
            as_info(cached_archive), as_info(location)))
        self.extract(cached_archive, local_dir_with_sub_dir)
    else:
        logger.info("Downloading [{}]...".format(as_info(location)))
        try:
            report_hook = None
            if logger.isEnabledFor(logging.INFO):
                report_hook = ReportDownloadProgress()
            filename, headers = urlretrieve(location, reporthook=report_hook)
            name, extension = os.path.splitext(filename)
            logger.info("[{}] successfully downloaded to [{}]".format(
                as_info(location), as_info(filename)))
            self.extract(filename, local_dir_with_sub_dir)
            if self._cuppa_env['cache_root']:
                cached_archive = os.path.join(self._cuppa_env['cache_root'], self._local_folder)
                logger.debug("Caching downloaded file as [{}]".format(as_info(cached_archive)))
                shutil.copyfile(filename, cached_archive)
        except ContentTooShortError as error:
            logger.error("Download of [{}] failed with error [{}]".format(
                as_error(location), as_error(str(error))))
            raise LocationException(error)
    return local_directory
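# --- Illustrative sketch (not part of the original source) ---
# The "os.rmdir() as emptiness probe" idiom used above: rmdir succeeds only
# on an empty directory, so failure implies a previous extraction already
# populated it and the directory can be reused. Stdlib only; temp dir is an
# example.
import os
import tempfile

probe = tempfile.mkdtemp()
try:
    os.rmdir(probe)            # empty: removed, so we would (re)extract here
    print("empty - extract archive")
except OSError:
    print("already populated - reuse as local_directory")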
def __call__( self, env, standard ):
    if standard not in self.stdcpp_choices:
        print( as_error( env, "cuppa: stdcpp: error: [{}] not in allowed list {}".format(
                standard, self.stdcpp_choices ) ) )
        return None
    env[ 'stdcpp' ] = standard
    toolchain = env['toolchain']
    flag = toolchain.stdcpp_flag_for( standard )
    env.ReplaceFlags( [ flag ] )
    return None
def retrieve_repo_info(cls, vcs_system, vcs_directory, expected_vc_type):
    if not expected_vc_type or expected_vc_type == vcs_system.vc_type():
        try:
            info = vcs_system.info(vcs_directory)
            return info
        except vcs_system.Error as ex:
            if expected_vc_type:
                logger.error("Failed to retrieve info for [{}] because [{}]".format(
                    as_error(vcs_directory), as_error(str(ex))))
                raise
    return None
def add_to_env( cls, env ):
    try:
        generate = env.get_option( 'generate-cbs' )
        if generate:
            obj = cls( env,
                       env.get_option( 'generate_cbs_include_thirdparty' ),
                       env.get_option( 'generate_cbs_exclude_relative_branches' ),
                       env.get_option( 'generate_cbs_exclude_paths_starting' ),
                       env.get_option( 'generate_cbs_place_with_sconscript' ) )
            env['project_generators']['codeblocks'] = obj
    except CodeblocksException as error:
        print( as_error( env, "cuppa: error: failed to create CodeBlocks project generator with error [{}]".format( error ) ) )
def obtain_from_repository( self, location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend ):
    rev_options = self.get_rev_options( vc_type, vcs_backend )
    action = "Cloning"
    if vc_type == "svn":
        action = "Checking out"
    max_attempts = 2
    attempt = 1
    while attempt <= max_attempts:
        logger.info( "{} [{}] into [{}]{}".format(
                action, as_info( location ), as_info( local_dir_with_sub_dir ),
                attempt > 1 and "(attempt {})".format( str(attempt) ) or "" ) )
        try:
            obtain( vcs_backend, local_dir_with_sub_dir, vcs_backend.url )
            logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
            break
        except pip_exceptions.PipError as error:
            attempt = attempt + 1
            log_as = logger.warn
            if attempt > max_attempts:
                log_as = logger.error
            log_as( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                    as_info( location ), as_notice( local_dir_with_sub_dir ),
                    ( rev_options and " to {}".format( as_notice( str(rev_options) ) ) or "" ),
                    as_error( str(error) ) ) )
            if attempt > max_attempts:
                raise LocationException( str(error) )
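# --- Illustrative sketch (not part of the original source) ---
# The bounded-retry shape used above, reduced to its essentials: tolerate
# early failures, escalate and re-raise once the attempt budget is spent.
# 'flaky_fetch' is a hypothetical stand-in for obtain().
_calls = {'n': 0}

def flaky_fetch():
    # fails on the first call, succeeds on the second
    _calls['n'] += 1
    if _calls['n'] < 2:
        raise RuntimeError("transient network failure")

max_attempts = 2
attempt = 1
while attempt <= max_attempts:
    try:
        flaky_fetch()
        break                  # success: stop retrying
    except RuntimeError:
        attempt += 1
        if attempt > max_attempts:
            raise              # final failure is fatal, as with LocationException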
def __call__( self, env, standard ):
    if standard not in self.stdcpp_choices:
        logger.error( "[{}] not in allowed list {}".format(
                as_error( standard ), as_notice( self.stdcpp_choices ) ) )
        return None
    env[ 'stdcpp' ] = standard
    toolchain = env['toolchain']
    flag = toolchain.stdcpp_flag_for( standard )
    env.ReplaceFlags( [ flag ] )
    return None
def _read(cls, json_report_path, default={}):
    with open(json_report_path, "r") as report:
        try:
            report = json.load(report)
            return report
        except ValueError as error:
            logger.error(
                "Test Report [{}] does not contain valid JSON. Error [{}] encountered while parsing"
                .format(as_info(json_report_path), as_error(str(error))))
            return default
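# --- Illustrative sketch (not part of the original source) ---
# The defensive-read contract above in miniature: a malformed report yields
# the caller-supplied default instead of propagating ValueError. Note that
# json.JSONDecodeError subclasses ValueError, so catching ValueError works
# on both Python 2 and 3. Content is an inline example.
import json

def read_or_default(text, default=None):
    try:
        return json.loads(text)
    except ValueError:
        return default if default is not None else {}

assert read_or_default('{"tests": 3}') == {"tests": 3}
assert read_or_default('not json') == {}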
def retrieve_repo_info( cls, vcs_system, vcs_directory, expected_vc_type ):
    if not expected_vc_type or expected_vc_type == vcs_system.vc_type():
        try:
            logger.trace( "expected_vc_type=[{expected_vc_type}], vcs_system=[{vc_type}], vcs_directory=[{directory}]".format(
                    expected_vc_type=as_info( str(expected_vc_type) ),
                    vc_type=as_info( vcs_system and vcs_system.vc_type() or "None" ),
                    directory=as_notice( str(vcs_directory) ) ) )
            info = vcs_system.info( vcs_directory )
            logger.trace( "vcs_info=[{vcs_info}]".format( vcs_info=as_info( str(info) ) ) )
            return info
        except vcs_system.Error as ex:
            if expected_vc_type:
                logger.error( "Failed to retrieve info for [{}] because [{}]".format(
                        as_error( vcs_directory ), as_error( str(ex) ) ) )
                raise
    return None
def apply_patch_if_needed( self, env, home ):
    patch_applied_path = os.path.join( home, "cuppa_test_patch_applied.txt" )
    diff_file = "boost_test_patch.diff"
    if os.path.exists( patch_applied_path ):
        print( "cuppa: boost: [{}] already applied".format( as_info( env, diff_file ) ) )
        return
    diff_path = os.path.join( os.path.split( __file__ )[0], "boost", diff_file )
    command = "patch --batch -p1 --input={}".format( diff_path )
    print( "cuppa: boost: info: Applying [{}] using [{}] in [{}]".format(
            as_info( env, diff_file ), as_info( env, command ), as_info( env, home ) ) )
    if subprocess.call( shlex.split( command ), cwd=home ) != 0:
        print( as_error( env, "cuppa: boost: error: Could not apply [{}]".format( diff_file ) ) )
    # Create an empty marker file so the patch is only applied once
    with open( patch_applied_path, "w" ) as patch_applied_file:
        pass
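# --- Illustrative sketch (not part of the original source) ---
# The patch invocation above in isolation: shlex.split() turns the command
# string into an argv list (no shell involved) and cwd runs patch from the
# root of the tree being patched. Paths are hypothetical examples.
import shlex
import subprocess

command = "patch --batch -p1 --input=/path/to/boost_test_patch.diff"
argv = shlex.split(command)
assert argv == ["patch", "--batch", "-p1", "--input=/path/to/boost_test_patch.diff"]
# return_code = subprocess.call(argv, cwd="/path/to/boost")  # 0 on success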
def add_to_env(cls, env):
    try:
        generate = env.get_option('generate-cbs')
        if generate:
            obj = cls(env,
                      env.get_option('generate_cbs_include_thirdparty'),
                      env.get_option('generate_cbs_exclude_relative_branches'),
                      env.get_option('generate_cbs_exclude_paths_starting'),
                      env.get_option('generate_cbs_place_with_sconscript'))
            env['project_generators']['codeblocks'] = obj
    except CodeblocksException as error:
        logger.error(
            "Failed to create CodeBlocks project generator with error [{}]"
            .format(as_error(error)))
def process_storage_options( cuppa_env ):

    def get_normal_path( option, defaults_to ):
        path = cuppa_env.get_option( option, default=defaults_to )
        return os.path.normpath( os.path.expanduser( path ) )

    cuppa_env['build_root'] = get_normal_path( 'build_root', default.build_root )
    cuppa_env['abs_build_root'] = os.path.abspath( cuppa_env['build_root'] )
    cuppa_env['download_root'] = get_normal_path( 'download_root', default.download_root )
    cuppa_env['cache_root'] = get_normal_path( 'cache_root', default.cache_root )
    if not os.path.exists( cuppa_env['cache_root'] ):
        try:
            os.makedirs( cuppa_env['cache_root'] )
        except os.error as e:
            logger.error( "Creating cache_root directory [{}] failed with error: {}".format(
                    cuppa_env['cache_root'], as_error( str(e) ) ) )
            raise
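# --- Illustrative sketch (not part of the original source) ---
# What get_normal_path() above does to a user-supplied option value:
# expanduser() resolves a leading '~' and normpath() collapses redundant
# separators and '.' segments into a stable path. Input is an example.
import os

raw = "~/builds//./output"
print(os.path.normpath(os.path.expanduser(raw)))
# e.g. /home/user/builds/output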
def _get_location(cls, env):
    import SCons.Errors
    location_id = cls.location_id(env)
    if not location_id:
        return None
    if location_id not in cls._cached_locations:
        location = location_id[0]
        develop = location_id[1]
        branch = location_id[2]
        use_develop = location_id[3]
        try:
            cls._cached_locations[location_id] = cuppa.location.Location(
                env,
                location,
                develop=develop,
                branch=branch,
                extra_sub_path=cls._extra_sub_path)
            logger.debug(
                "Adding location [{}]({}) to cached locations".format(
                    as_notice(cls._name.title()),
                    as_notice(str(location_id))))
        except cuppa.location.LocationException as error:
            logger.error(
                "Could not get location for [{}] at [{}] (and develop [{}], use=[{}]) with branch [{}] and extra sub path [{}]. Failed with error [{}]"
                .format(as_notice(cls._name.title()),
                        as_info(str(location)),
                        as_info(str(develop)),
                        as_notice(str(use_develop and True or False)),
                        as_notice(str(branch)),
                        as_notice(str(cls._extra_sub_path)),
                        as_error(str(error))))
            raise SCons.Errors.StopError(error)
    else:
        logger.debug(
            "Loading location [{}]({}) from cached locations".format(
                as_notice(cls._name.title()),
                as_notice(str(location_id))))
    return cls._cached_locations[location_id]
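# --- Illustrative sketch (not part of the original source) ---
# The caching scheme above in miniature: location_id is a tuple (location,
# develop, branch, use_develop), and tuples are hashable, so they key a
# plain dict of constructed objects. Names here are illustrative only.
_cached_locations = {}

def get_location(location_id, factory):
    if location_id not in _cached_locations:
        _cached_locations[location_id] = factory(location_id)  # construct once
    return _cached_locations[location_id]                      # reuse thereafter

loc_a = get_location(("repo", None, "master", False), lambda key: object())
loc_b = get_location(("repo", None, "master", False), lambda key: object())
assert loc_a is loc_b          # the second call reuses the cached instance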
def _get_location(cls, env):
    location_id = cls.location_id(env)
    if not location_id:
        return None
    if location_id not in cls._cached_locations:
        location = location_id[0]
        branch = location_id[1]
        try:
            cls._cached_locations[location_id] = cuppa.location.Location(
                env, location, branch=branch, extra_sub_path=cls._extra_sub_path)
        except cuppa.location.LocationException as error:
            logger.error(
                "Could not get location for [{}] at [{}] with branch [{}] and extra sub path [{}]. Failed with error [{}]"
                .format(as_notice(cls._name.title()),
                        as_notice(str(location)),
                        as_notice(str(branch)),
                        as_notice(str(cls._extra_sub_path)),
                        as_error(error)))
            return None
    return cls._cached_locations[location_id]
def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):
    local_directory = None
    base = cuppa_env['download_root']
    if not os.path.isabs( base ):
        base = os.path.join( cuppa_env['working_dir'], base )

    if location.startswith( 'file:' ):
        location = pip.download.url_to_path( location )

    if not pip.download.is_url( location ):
        if pip.download.is_archive_file( location ):
            local_folder = self.folder_name_from_path( location )
            local_directory = os.path.join( base, local_folder )
            if os.path.exists( local_directory ):
                try:
                    os.rmdir( local_directory )
                except:
                    return local_directory, False
            self.extract( location, local_directory )
        else:
            local_directory = branch and os.path.join( location, branch ) or location
            return local_directory, False
    else:
        local_folder = self.folder_name_from_path( full_url )
        local_directory = os.path.join( base, local_folder )

        if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
            logger.debug( "[{}] is an archive download".format( as_info( location ) ) )
            local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

            # First we check to see if we already downloaded and extracted this archive before
            if os.path.exists( local_dir_with_sub_dir ):
                try:
                    # If not empty this will fail
                    os.rmdir( local_dir_with_sub_dir )
                except:
                    # Not empty so we'll return this as the local_directory
                    return local_directory, True

            # If not we then check to see if we cached the download
            cached_archive = self.get_cached_archive( cuppa_env['cache_root'], local_folder )
            if cached_archive:
                logger.debug( "Cached archive [{}] found for [{}]".format(
                        as_info( cached_archive ), as_info( location ) ) )
                self.extract( cached_archive, local_dir_with_sub_dir )
            else:
                logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                try:
                    report_hook = None
                    if logger.isEnabledFor( logging.INFO ):
                        report_hook = ReportDownloadProgress()
                    filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                    name, extension = os.path.splitext( filename )
                    logger.info( "[{}] successfully downloaded to [{}]".format(
                            as_info( location ), as_info( filename ) ) )
                    self.extract( filename, local_dir_with_sub_dir )
                    if cuppa_env['cache_root']:
                        cached_archive = os.path.join( cuppa_env['cache_root'], local_folder )
                        logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                        shutil.copyfile( filename, cached_archive )
                except urllib.ContentTooShortError as error:
                    logger.error( "Download of [{}] failed with error [{}]".format(
                            as_error( location ), as_error( str(error) ) ) )
                    raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

        elif '+' in full_url.scheme:
            vc_type = location.split('+', 1)[0]
            backend = pip.vcs.vcs.get_backend( vc_type )
            if backend:
                vcs_backend = backend( location )
                rev_options = self.get_rev_options( vc_type, vcs_backend )
                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

                if os.path.exists( local_directory ):
                    url, repository, branch, revision = self.get_info( location, local_dir_with_sub_dir, full_url )
                    version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                    logger.debug( "Updating [{}] in [{}]{} at [{}]".format(
                            as_info( location ), as_notice( local_dir_with_sub_dir ),
                            ( rev_options and " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                            as_info( version ) ) )
                    try:
                        vcs_backend.update( local_dir_with_sub_dir, rev_options )
                        logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                    except pip.exceptions.InstallationError as error:
                        logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                as_warning( location ), as_warning( local_dir_with_sub_dir ),
                                ( rev_options and " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                as_warning( str(error) ) ) )
                else:
                    action = "Cloning"
                    if vc_type == "svn":
                        action = "Checking out"
                    logger.info( "{} [{}] into [{}]".format(
                            action, as_info( location ), as_info( local_dir_with_sub_dir ) ) )
                    try:
                        vcs_backend.obtain( local_dir_with_sub_dir )
                        logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                    except pip.exceptions.InstallationError as error:
                        logger.error( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                as_error( location ), as_error( local_dir_with_sub_dir ),
                                ( rev_options and " to {}".format( as_error( str(rev_options) ) ) or "" ),
                                as_error( str( error ) ) ) )
                        raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

    return local_directory, True
def get_local_directory(self, cuppa_env, location, sub_dir, branch, full_url):
    offline = cuppa_env['offline']
    local_directory = None
    base = cuppa_env['download_root']
    if not os.path.isabs(base):
        base = os.path.join(cuppa_env['working_dir'], base)

    if location.startswith('file:'):
        location = pip_download.url_to_path(location)

    if not pip_is_url(location):
        if pip_is_archive_file(location):
            self._local_folder = self.folder_name_from_path(location, cuppa_env)
            local_directory = os.path.join(base, self._local_folder)
            local_dir_with_sub_dir = os.path.join(local_directory, sub_dir and sub_dir or "")
            if os.path.exists(local_dir_with_sub_dir):
                try:
                    os.rmdir(local_dir_with_sub_dir)
                except:
                    return local_directory
            self.extract(location, local_dir_with_sub_dir)
            logger.debug("(local archive) Location = [{}]".format(as_info(location)))
            logger.debug("(local archive) Local folder = [{}]".format(as_info(self._local_folder)))
        else:
            local_directory = branch and os.path.join(location, branch) or location
            self._local_folder = self.folder_name_from_path(location, cuppa_env)
            logger.debug("(local file) Location = [{}]".format(as_info(location)))
            logger.debug("(local file) Local folder = [{}]".format(as_info(self._local_folder)))
        return local_directory
    else:
        self._local_folder = self.folder_name_from_path(full_url, cuppa_env)
        local_directory = os.path.join(base, self._local_folder)

        if full_url.scheme.startswith('http') and self.url_is_download_archive_url(full_url.path):
            logger.debug("[{}] is an archive download".format(as_info(location)))
            local_dir_with_sub_dir = os.path.join(local_directory, sub_dir and sub_dir or "")

            # First we check to see if we already downloaded and extracted this archive before
            if os.path.exists(local_dir_with_sub_dir):
                try:
                    # If not empty this will fail
                    os.rmdir(local_dir_with_sub_dir)
                except:
                    # Not empty so we'll return this as the local_directory
                    logger.debug("(already present) Location = [{}]".format(as_info(location)))
                    logger.debug("(already present) Local folder = [{}]".format(
                        as_info(str(self._local_folder))))
                    return local_directory

            if cuppa_env['dump'] or cuppa_env['clean']:
                return local_directory

            # If not we then check to see if we cached the download
            cached_archive = self.get_cached_archive(cuppa_env['cache_root'], self._local_folder)
            if cached_archive:
                logger.debug("Cached archive [{}] found for [{}]".format(
                    as_info(cached_archive), as_info(location)))
                self.extract(cached_archive, local_dir_with_sub_dir)
            else:
                logger.info("Downloading [{}]...".format(as_info(location)))
                try:
                    report_hook = None
                    if logger.isEnabledFor(logging.INFO):
                        report_hook = ReportDownloadProgress()
                    filename, headers = urlretrieve(location, reporthook=report_hook)
                    name, extension = os.path.splitext(filename)
                    logger.info("[{}] successfully downloaded to [{}]".format(
                        as_info(location), as_info(filename)))
                    self.extract(filename, local_dir_with_sub_dir)
                    if cuppa_env['cache_root']:
                        cached_archive = os.path.join(cuppa_env['cache_root'], self._local_folder)
                        logger.debug("Caching downloaded file as [{}]".format(as_info(cached_archive)))
                        shutil.copyfile(filename, cached_archive)
                except ContentTooShortError as error:
                    logger.error("Download of [{}] failed with error [{}]".format(
                        as_error(location), as_error(str(error))))
                    raise LocationException(error)

        elif '+' in full_url.scheme:
            vc_type = location.split('+', 1)[0]
            backend = pip_vcs.vcs.get_backend(vc_type)
            if backend:
                try:
                    vcs_backend = backend(self.expand_secret(location))
                except:
                    # Pip version >= 19
                    backend.url = self.expand_secret(location)
                    vcs_backend = backend
                local_dir_with_sub_dir = os.path.join(local_directory, sub_dir and sub_dir or "")

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                if os.path.exists(local_directory):
                    url, repository, branch, remote, revision = self.get_info(
                        location, local_dir_with_sub_dir, full_url, vc_type)
                    rev_options = self.get_rev_options(vc_type, vcs_backend, local_remote=remote)
                    version = self.ver_rev_summary(branch, revision, self._full_url.path)[0]
                    if not offline:
                        logger.info("Updating [{}] in [{}]{} at [{}]".format(
                            as_info(location), as_notice(local_dir_with_sub_dir),
                            (rev_options and " on {}".format(as_notice(str(rev_options))) or ""),
                            as_info(version)))
                        try:
                            update(vcs_backend, local_dir_with_sub_dir, rev_options)
                            logger.debug("Successfully updated [{}]".format(as_info(location)))
                        except pip_exceptions.PipError as error:
                            logger.warn("Could not update [{}] in [{}]{} due to error [{}]".format(
                                as_warning(location), as_warning(local_dir_with_sub_dir),
                                (rev_options and " at {}".format(as_warning(str(rev_options))) or ""),
                                as_warning(str(error))))
                    else:
                        logger.debug("Skipping update for [{}] as running in offline mode".format(
                            as_info(location)))
                else:
                    rev_options = self.get_rev_options(vc_type, vcs_backend)
                    action = "Cloning"
                    if vc_type == "svn":
                        action = "Checking out"
                    max_attempts = 2
                    attempt = 1
                    while attempt <= max_attempts:
                        logger.info("{} [{}] into [{}]{}".format(
                            action, as_info(location), as_info(local_dir_with_sub_dir),
                            attempt > 1 and "(attempt {})".format(str(attempt)) or ""))
                        try:
                            obtain(vcs_backend, local_dir_with_sub_dir, vcs_backend.url)
                            logger.debug("Successfully retrieved [{}]".format(as_info(location)))
                            break
                        except pip_exceptions.PipError as error:
                            attempt = attempt + 1
                            log_as = logger.warn
                            if attempt > max_attempts:
                                log_as = logger.error
                            log_as("Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                as_info(location), as_notice(local_dir_with_sub_dir),
                                (rev_options and " to {}".format(as_notice(str(rev_options))) or ""),
                                as_error(str(error))))
                            if attempt > max_attempts:
                                raise LocationException(str(error))

        logger.debug("(url path) Location = [{}]".format(as_info(location)))
        logger.debug("(url path) Local folder = [{}]".format(as_info(self._local_folder)))
        return local_directory
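# --- Illustrative sketch (not part of the original source) ---
# The scheme handling above: a pip-style VCS URL carries the VC type before
# the '+', so splitting once on '+' recovers the backend name. Example URL.
location = "git+https://example.com/project/repo.git"
vc_type = location.split('+', 1)[0]
assert vc_type == "git"
assert '+' in "git+https"      # the full_url.scheme test that selects this branch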
def call_project_sconscript_files(self, toolchain, variant, target_arch, abi, sconscript_env, project):
    sconscript_file = project
    if os.path.exists(sconscript_file) and os.path.isfile(sconscript_file):
        logger.debug(
            "project exists and added to build [{}] using [{},{},{}]".format(
                as_notice(sconscript_file), as_notice(toolchain.name()),
                as_notice(variant), as_notice(target_arch)))
        path_without_ext = os.path.splitext(sconscript_file)[0]
        sconstruct_offset_path, sconscript_name = os.path.split(sconscript_file)
        name = os.path.splitext(sconscript_name)[0]
        sconscript_env['sconscript_name_id'] = name
        if name.lower() == "sconscript":
            sconscript_env['sconscript_name_id'] = ""
            path_without_ext = sconstruct_offset_path
            name = path_without_ext
        sconscript_env['sconscript_file'] = sconscript_file
        build_root = sconscript_env['build_root']
        working_folder = 'working'

        sconscript_env = sconscript_env.Clone()
        sconscript_env['sconscript_env'] = sconscript_env
        sconscript_env['sconscript_build_dir'] = path_without_ext
        sconscript_env['sconscript_toolchain_build_dir'] = os.path.join(
            path_without_ext, toolchain.name())
        sconscript_env['sconscript_dir'] = os.path.join(
            sconscript_env['base_path'], sconstruct_offset_path)
        sconscript_env['abs_sconscript_dir'] = os.path.abspath(sconscript_env['sconscript_dir'])
        sconscript_env['tool_variant_dir'] = os.path.join(
            toolchain.name(), variant, target_arch, abi)
        sconscript_env['tool_variant_working_dir'] = os.path.join(
            sconscript_env['tool_variant_dir'], working_folder)

        build_base_path = os.path.join(path_without_ext, sconscript_env['tool_variant_dir'])

        def flatten_dir(directory, join_char="_"):
            return join_char.join(os.path.normpath(directory).split(os.path.sep))

        sconscript_env['build_base_path'] = build_base_path
        sconscript_env['flat_build_base'] = flatten_dir(build_base_path)

        sconscript_env['tool_variant_build_dir'] = os.path.join(
            build_root, sconscript_env['tool_variant_dir'], working_folder)
        sconscript_env['build_dir'] = os.path.normpath(
            os.path.join(build_root, build_base_path, working_folder, ''))
        sconscript_env['abs_build_dir'] = os.path.abspath(sconscript_env['build_dir'])
        sconscript_env['build_tool_variant_dir'] = os.path.normpath(
            os.path.join(build_root, sconscript_env['tool_variant_dir'], working_folder, ''))
        sconscript_env['offset_dir'] = sconstruct_offset_path
        sconscript_env['offset_tool_variant_dir'] = os.path.join(
            sconscript_env['offset_dir'], sconscript_env['tool_variant_dir'])
        sconscript_env['tool_variant_dir_offset'] = os.path.normpath(
            os.path.join(sconscript_env['tool_variant_dir'], sconscript_env['offset_dir']))
        sconscript_env['flat_tool_variant_dir_offset'] = os.path.normpath(
            os.path.join(flatten_dir(sconscript_env['tool_variant_dir']),
                         sconscript_env['offset_dir']))
        sconscript_env['final_dir'] = '..' + os.path.sep + 'final' + os.path.sep
        sconscript_env['active_toolchain'] = toolchain

        def abs_final_dir(abs_build_dir, final_dir):
            return os.path.isabs(final_dir) and final_dir or os.path.normpath(
                os.path.join(abs_build_dir, final_dir))

        sconscript_env['abs_final_dir'] = abs_final_dir(
            sconscript_env['abs_build_dir'], sconscript_env['final_dir'])

        sconscript_env.AppendUnique(INCPATH=[sconscript_env['offset_dir']])

        sconscript_exports = {
            'env': sconscript_env,
            'sconscript_env': sconscript_env,
            'build_root': build_root,
            'build_dir': sconscript_env['build_dir'],
            'abs_build_dir': sconscript_env['abs_build_dir'],
            'final_dir': sconscript_env['final_dir'],
            'abs_final_dir': sconscript_env['abs_final_dir'],
            'common_variant_final_dir': '../../../common/final/',
            'common_project_final_dir': build_root + '/common/final/',
            'project': name,
        }

        self._configure.configure(sconscript_exports['env'])

        cuppa.modules.registration.init_env_for_variant("methods", sconscript_exports)

        if sconscript_env['dump']:
            logger.info("{} {}".format(
                as_info_label("Dumping ENV for"),
                as_info(sconscript_exports['build_dir'])))
            dump = sconscript_env.Dump()
            logger.info("\n" + dump + "\n")
        else:
            SCons.Script.SConscript(
                [sconscript_file],
                variant_dir=sconscript_exports['build_dir'],
                duplicate=0,
                exports=sconscript_exports)
    else:
        logger.error(
            "Skipping non-existent project [{}] using [{},{},{}]".format(
                as_error(sconscript_file), as_error(toolchain.name()),
                as_error(variant), as_error(target_arch)))
def lazy_create_path( path ):
    if not os.path.exists( path ):
        try:
            os.makedirs( path )
        except os.error as e:
            # Only report an error if the path still does not exist; a
            # concurrent creator may have won the race, which is fine
            if not os.path.exists( path ):
                logger.error( "Could not create path [{}]. Failed with error [{}]".format(
                        as_notice( path ), as_error( str(e) ) ) )
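# --- Illustrative note (not part of the original source) ---
# On Python 3 the same "create if missing, tolerate a concurrent creator"
# behaviour is available directly; lazy_create_path() above implements it
# portably for older interpreters. The path is an arbitrary example.
import os

os.makedirs(os.path.join("/tmp", "cuppa_demo", "cache"), exist_ok=True)
os.makedirs(os.path.join("/tmp", "cuppa_demo", "cache"), exist_ok=True)  # no-op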
def call_project_sconscript_files( self, toolchain, variant, target_arch, sconscript_env, project ):
    sconscript_file = project
    if os.path.exists( sconscript_file ) and os.path.isfile( sconscript_file ):
        logger.debug( "project exists and added to build [{}] using [{},{},{}]".format(
                as_notice( sconscript_file ), as_notice( toolchain.name() ),
                as_notice( variant ), as_notice( target_arch ) ) )
        path_without_ext = os.path.splitext( sconscript_file )[0]
        sconstruct_offset_path, sconscript_name = os.path.split( sconscript_file )
        name = os.path.splitext( sconscript_name )[0]
        if name.lower() == "sconscript":
            path_without_ext = sconstruct_offset_path
            name = path_without_ext
        sconscript_env['sconscript_file'] = sconscript_file
        build_root = sconscript_env['build_root']

        sconscript_env = sconscript_env.Clone()
        sconscript_env['sconscript_env'] = sconscript_env
        sconscript_env['sconscript_build_dir'] = path_without_ext
        sconscript_env['sconscript_toolchain_build_dir'] = os.path.join( path_without_ext, toolchain.name() )
        sconscript_env['sconscript_dir'] = os.path.join( sconscript_env['base_path'], sconstruct_offset_path )
        sconscript_env['build_dir'] = os.path.normpath( os.path.join(
                build_root, path_without_ext, toolchain.name(), variant, target_arch, 'working', '' ) )
        sconscript_env['abs_build_dir'] = os.path.abspath( sconscript_env['build_dir'] )
        sconscript_env['offset_dir'] = sconstruct_offset_path
        sconscript_env['final_dir'] = '..' + os.path.sep + 'final' + os.path.sep
        sconscript_env['active_toolchain'] = toolchain

        def abs_final_dir( abs_build_dir, final_dir ):
            return os.path.isabs( final_dir ) and final_dir or os.path.normpath(
                    os.path.join( abs_build_dir, final_dir ) )

        sconscript_env['abs_final_dir'] = abs_final_dir( sconscript_env['abs_build_dir'],
                                                         sconscript_env['final_dir'] )

        sconscript_env.AppendUnique( INCPATH = [ sconscript_env['offset_dir'] ] )

        sconscript_exports = {
            'env'                     : sconscript_env,
            'sconscript_env'          : sconscript_env,
            'build_root'              : build_root,
            'build_dir'               : sconscript_env['build_dir'],
            'abs_build_dir'           : sconscript_env['abs_build_dir'],
            'final_dir'               : sconscript_env['final_dir'],
            'abs_final_dir'           : sconscript_env['abs_final_dir'],
            'common_variant_final_dir': '../../../common/final/',
            'common_project_final_dir': build_root + '/common/final/',
            'project'                 : name,
        }

        self._configure.configure( sconscript_exports['env'] )

        cuppa.modules.registration.init_env_for_variant( "methods", sconscript_exports )

        SCons.Script.SConscript(
            [ sconscript_file ],
            variant_dir = sconscript_exports['build_dir'],
            duplicate   = 0,
            exports     = sconscript_exports )
    else:
        logger.error( "Skipping non-existent project [{}] using [{},{},{}]".format(
                as_error( sconscript_file ), as_error( toolchain.name() ),
                as_error( variant ), as_error( target_arch ) ) )
def __call__( self, target, source, env ):
    executable = str( source[0].abspath )
    working_dir, test = os.path.split( executable )
    if self._working_dir:
        working_dir = self._working_dir
    program_path = source[0].path
    suite = env['build_dir']

    if cuppa.build_platform.name() == "Windows":
        executable = '"' + executable + '"'

    test_command = executable
    if self._command:
        test_command = self._command
        working_dir = self._working_dir and self._working_dir or self._final_dir
        test = os.path.relpath( executable, working_dir )

    test_suite = TestSuite.create( suite, env )
    test_case = test_suite.enter_test( test, expected=self._expected )

    show_test_output = env['show_test_output']

    try:
        return_code = self._run_test( test_case, show_test_output, program_path,
                                      test_command, working_dir, env )

        if return_code == self._expected_exit_code:
            test_suite.exit_test( test_case, 'passed' )
        elif return_code < 0:
            self.__write_file_to_stderr( stderr_file_name_from( program_path ) )
            logger.error( "Test was terminated by signal: {}".format( as_error( str(return_code) ) ) )
            test_suite.exit_test( test_case, 'aborted' )
        elif return_code > 0:
            self.__write_file_to_stderr( stderr_file_name_from( program_path ) )
            logger.error( "Test returned with error code: {}".format( as_error( str(return_code) ) ) )
            test_suite.exit_test( test_case, 'failed' )
        else:
            test_suite.exit_test( test_case, 'passed' )

        cuppa.test_report.cuppa_json.write_report( report_file_name_from( program_path ),
                                                   test_suite.tests() )

        if return_code == self._expected_exit_code:
            self._write_success_file( success_file_name_from( program_path ) )
        elif return_code:
            self._remove_success_file( success_file_name_from( program_path ) )
            if return_code < 0:
                raise BuildError( node=source[0],
                                  errstr="Test was terminated by signal: {}".format( str(-return_code) ) )
            else:
                raise BuildError( node=source[0],
                                  errstr="Test returned with error code: {}".format( str(return_code) ) )
        else:
            self._write_success_file( success_file_name_from( program_path ) )

        return None
    except OSError as e:
        logger.error( "Execution of [{}] failed with error: {}".format(
                as_notice( test_command ), as_notice( str(e) ) ) )
        raise BuildError( e )
def __call__( self, target, source, env ):
    executable = str( source[0].abspath )
    working_dir = self._working_dir and self._working_dir or os.path.split( executable )[0]
    program_path = source[0].path
    notifier = Notify( env, env['show_test_output'] )

    if cuppa.build_platform.name() == "Windows":
        executable = '"' + executable + '"'

    boost_version = None
    preprocess = self.default_preprocess
    argument_prefix = ""

    if 'boost' in env['dependencies']:
        boost_version = env['dependencies']['boost']( env ).numeric_version()
        if env['dependencies']['boost']( env ).patched_test():
            argument_prefix = "boost.test."

    test_command = executable + " --{0}log_format=hrf --{0}log_level=all --{0}report_level=no".format( argument_prefix )

    if boost_version:
        if boost_version >= 1.67:
            preprocess = cuppa.utility.preprocess.AnsiEscape.strip
            test_command = executable + " --{0}log_format=HRF --{0}log_level=all --{0}report_level=no --{0}color_output=no".format( argument_prefix )
        elif boost_version >= 1.60:
            test_command = executable + " --{0}log_format=HRF --{0}log_level=all --{0}report_level=no".format( argument_prefix )

    try:
        return_code, tests = self.__run_test( program_path, test_command, working_dir,
                                              notifier, preprocess, env )

        cuppa.test_report.cuppa_json.write_report( report_file_name_from( program_path ), tests )

        if return_code < 0:
            self.__write_file_to_stderr( stderr_file_name_from( program_path ) )
            logger.error( "Test was terminated by signal: {}".format( as_error( str(return_code) ) ) )
        elif return_code > 0:
            self.__write_file_to_stderr( stderr_file_name_from( program_path ) )
            logger.error( "Test returned with error code: {}".format( as_error( str(return_code) ) ) )
        elif notifier.master_suite['status'] != 'passed':
            logger.error( "Not all test suites passed" )
            raise BuildError( node=source[0], errstr="Not all test suites passed" )

        if return_code:
            self._remove_success_file( success_file_name_from( program_path ) )
            if return_code < 0:
                raise BuildError( node=source[0],
                                  errstr="Test was terminated by signal: {}".format( str(-return_code) ) )
            else:
                raise BuildError( node=source[0],
                                  errstr="Test returned with error code: {}".format( str(return_code) ) )
        else:
            self._write_success_file( success_file_name_from( program_path ) )

        return None
    except OSError as e:
        logger.error( "Execution of [{}] failed with error: {}".format(
                as_notice( test_command ), as_notice( str(e) ) ) )
        raise BuildError( e )
def get_local_directory_for_repository( self, location, sub_dir, full_url, local_directory ):
    vc_type = location.split('+', 1)[0]
    backend = pip_vcs.vcs.get_backend( vc_type )
    if not backend:
        logger.error( "URL VC of [{}] for [{}] NOT recognised so location cannot be retrieved".format(
                as_error( vc_type ), as_error( location ) ) )
        raise LocationException( "URL VC of [{}] for [{}] NOT recognised so location cannot be retrieved".format( vc_type, location ) )

    if self._cuppa_env['dump'] or self._cuppa_env['clean']:
        return local_directory

    local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

    if not self._offline:
        try:
            vcs_backend = backend( self.expand_secret( location ) )
        except:
            # Pip version >= 19
            backend.url = self.expand_secret( location )
            vcs_backend = backend
        if os.path.exists( local_directory ):
            self.update_from_repository( location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend )
        else:
            self.obtain_from_repository( location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend )
        logger.debug( "(url path) Location = [{}]".format( as_info( location ) ) )
        logger.debug( "(url path) Local folder = [{}]".format( as_info( self._local_folder ) ) )
    else:
        branched_local_directory = None
        if self.location_match_current_branch():
            # If relative versioning is in play and we are offline check first to see
            # if the specified branch or tag is available and prefer that one
            if self._supports_relative_versioning and self._current_branch:
                branched_local_directory = local_directory + "@" + self._current_branch
                if os.path.exists( branched_local_directory ):
                    return branched_local_directory
            elif self._supports_relative_versioning and self._current_revision:
                branched_local_directory = local_directory + "@" + self._current_revision
                if os.path.exists( branched_local_directory ):
                    return branched_local_directory
            elif self._supports_relative_versioning and self._default_branch:
                branched_local_directory = local_directory + "@" + self._default_branch
                if os.path.exists( branched_local_directory ):
                    return branched_local_directory
        # If the preferred branch is not available then fall back to the
        # default of no branch being specified
        if os.path.exists( local_directory ):
            return local_directory
        else:
            if self.location_match_current_branch():
                logger.error( "Running in {offline} mode and neither [{local_dir}] nor a branched dir"
                              " [{branched_dir}] exists so location cannot be retrieved".format(
                        offline=as_info_label( "OFFLINE" ),
                        local_dir=as_error( local_directory ),
                        branched_dir=as_error( str(branched_local_directory) ) ) )
                raise LocationException( "Running in {offline} mode and neither [{local_dir}] nor a branched dir"
                                         " [{branched_dir}] exists so location cannot be retrieved".format(
                        offline="OFFLINE",
                        local_dir=local_directory,
                        branched_dir=str(branched_local_directory) ) )
            else:
                logger.error( "Running in {offline} mode and [{local_dir}] does not exist"
                              " so location cannot be retrieved".format(
                        offline=as_info_label( "OFFLINE" ),
                        local_dir=as_error( local_directory ) ) )
                raise LocationException( "Running in {offline} mode and [{local_dir}] does not exist"
                                         " so location cannot be retrieved".format(
                        offline="OFFLINE",
                        local_dir=local_directory ) )
    return local_directory
def _run_gcov(self, env, source_path, gcov_path, gcov_log_path):
    working_dir = env['working_dir']
    build_dir = env['build_dir']
    final_dir = self._final_dir

    qualified_base = source_path.startswith(env['build_dir']) and env['build_dir'] or env['offset_dir']
    if qualified_base.startswith("./"):
        qualified_base = qualified_base[2:]
    qualified_base = qualified_base.replace(os.path.sep, '#')

    logger.trace("Qualified base = [{}]".format(as_notice(str(qualified_base))))

    if not os.path.isabs(self._final_dir):
        final_dir = os.path.normpath(os.path.join(build_dir, self._final_dir))

    suite_name = working_dir + self._program_id
    coverage_suite = CoverageSuite.create(
        self._program_id,
        suite_name,
        env,
        final_dir,
        include_patterns=self._include_patterns,
        exclude_patterns=self._exclude_patterns)

    relative_only = "-r"
    if self._coverage_tool.startswith("llvm-cov"):
        relative_only = ""

    command = '{gcov} -o {path} -l -p {relative} -c -b {source}'.format(
        gcov=self._coverage_tool,
        path=gcov_path,
        relative=relative_only,
        source=source_path)

    return_code, output = run_command(command, working_dir, env)

    if return_code == 0:
        gcov_source_path = source_path.replace(os.path.sep, '#')
        gcov_files = glob.glob(gcov_source_path + '*gcov')
        for gcov_file in gcov_files:
            filename, ext = os.path.splitext(str(gcov_file))
            filename = filename + self._program_id + ext
            new_filename = filename[len(qualified_base) + 1:]
            logger.trace("Move GCOV [{}] to [{}]...".format(
                as_notice(str(gcov_file)), as_notice(new_filename)))
            new_gcov_file = os.path.join(build_dir, new_filename)
            try:
                os.rename(str(gcov_file), new_gcov_file)
            except OSError as e:
                logger.error(
                    "Failed moving gcov file [{}] to [{}] with error: {}".format(
                        as_notice(str(gcov_file)),
                        as_notice(new_gcov_file),
                        as_error(str(e))))
        with open(gcov_log_path, 'w') as summary_file:
            summary_file.write(output)
        coverage_suite.run_suite(self._target)
    else:
        sys.stdout.write(output + "\n")
        os.remove(gcov_log_path)
def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):
    offline = cuppa_env['offline']
    local_directory = None

    base = cuppa_env['download_root']
    if not os.path.isabs( base ):
        base = os.path.join( cuppa_env['working_dir'], base )

    if location.startswith( 'file:' ):
        location = pip_download.url_to_path( location )

    if not pip_download.is_url( location ):
        if pip_download.is_archive_file( location ):
            self._local_folder = self.folder_name_from_path( location, cuppa_env )
            local_directory = os.path.join( base, self._local_folder )
            local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )
            if os.path.exists( local_dir_with_sub_dir ):
                try:
                    os.rmdir( local_dir_with_sub_dir )
                except:
                    return local_directory
            self.extract( location, local_dir_with_sub_dir )
            logger.debug( "(local archive) Location = [{}]".format( as_info( location ) ) )
            logger.debug( "(local archive) Local folder = [{}]".format( as_info( self._local_folder ) ) )
        else:
            local_directory = branch and os.path.join( location, branch ) or location
            self._local_folder = self.folder_name_from_path( location, cuppa_env )
            logger.debug( "(local file) Location = [{}]".format( as_info( location ) ) )
            logger.debug( "(local file) Local folder = [{}]".format( as_info( self._local_folder ) ) )
        return local_directory
    else:
        self._local_folder = self.folder_name_from_path( full_url, cuppa_env )
        local_directory = os.path.join( base, self._local_folder )

        if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
            logger.debug( "[{}] is an archive download".format( as_info( location ) ) )
            local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

            # First we check to see if we already downloaded and extracted this archive before
            if os.path.exists( local_dir_with_sub_dir ):
                try:
                    # If not empty this will fail
                    os.rmdir( local_dir_with_sub_dir )
                except:
                    # Not empty so we'll return this as the local_directory
                    logger.debug( "(already present) Location = [{}]".format( as_info( location ) ) )
                    logger.debug( "(already present) Local folder = [{}]".format(
                            as_info( str(self._local_folder) ) ) )
                    return local_directory

            if cuppa_env['dump'] or cuppa_env['clean']:
                return local_directory

            # If not we then check to see if we cached the download
            cached_archive = self.get_cached_archive( cuppa_env['cache_root'], self._local_folder )
            if cached_archive:
                logger.debug( "Cached archive [{}] found for [{}]".format(
                        as_info( cached_archive ), as_info( location ) ) )
                self.extract( cached_archive, local_dir_with_sub_dir )
            else:
                logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                try:
                    report_hook = None
                    if logger.isEnabledFor( logging.INFO ):
                        report_hook = ReportDownloadProgress()
                    filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                    name, extension = os.path.splitext( filename )
                    logger.info( "[{}] successfully downloaded to [{}]".format(
                            as_info( location ), as_info( filename ) ) )
                    self.extract( filename, local_dir_with_sub_dir )
                    if cuppa_env['cache_root']:
                        cached_archive = os.path.join( cuppa_env['cache_root'], self._local_folder )
                        logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                        shutil.copyfile( filename, cached_archive )
                except urllib.ContentTooShortError as error:
                    logger.error( "Download of [{}] failed with error [{}]".format(
                            as_error( location ), as_error( str(error) ) ) )
                    raise LocationException( error )

        elif '+' in full_url.scheme:
            vc_type = location.split('+', 1)[0]
            backend = pip_vcs.vcs.get_backend( vc_type )
            if backend:
                vcs_backend = backend( self.expand_secret( location ) )
                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                if os.path.exists( local_directory ):
                    url, repository, branch, remote, revision = self.get_info(
                            location, local_dir_with_sub_dir, full_url, vc_type )
                    rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
                    version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                    if not offline:
                        logger.info( "Updating [{}] in [{}]{} at [{}]".format(
                                as_info( location ), as_notice( local_dir_with_sub_dir ),
                                ( rev_options and " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                                as_info( version ) ) )
                        try:
                            update( vcs_backend, local_dir_with_sub_dir, rev_options )
                            logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                        except pip_exceptions.PipError as error:
                            logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                    as_warning( location ), as_warning( local_dir_with_sub_dir ),
                                    ( rev_options and " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                    as_warning( str(error) ) ) )
                    else:
                        logger.debug( "Skipping update for [{}] as running in offline mode".format(
                                as_info( location ) ) )
                else:
                    rev_options = self.get_rev_options( vc_type, vcs_backend )
                    action = "Cloning"
                    if vc_type == "svn":
                        action = "Checking out"
                    max_attempts = 2
                    attempt = 1
                    while attempt <= max_attempts:
                        logger.info( "{} [{}] into [{}]{}".format(
                                action, as_info( location ), as_info( local_dir_with_sub_dir ),
                                attempt > 1 and "(attempt {})".format( str(attempt) ) or "" ) )
                        try:
                            vcs_backend.obtain( local_dir_with_sub_dir )
                            logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                            break
                        except pip_exceptions.PipError as error:
                            attempt = attempt + 1
                            log_as = logger.warn
                            if attempt > max_attempts:
                                log_as = logger.error
                            log_as( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                    as_info( location ), as_notice( local_dir_with_sub_dir ),
                                    ( rev_options and " to {}".format( as_notice( str(rev_options) ) ) or "" ),
                                    as_error( str(error) ) ) )
                            if attempt > max_attempts:
                                raise LocationException( str(error) )

        logger.debug( "(url path) Location = [{}]".format( as_info( location ) ) )
        logger.debug( "(url path) Local folder = [{}]".format( as_info( self._local_folder ) ) )
        return local_directory
def _get_location( cls, env ):
    location_id = cls.location_id( env )
    if not location_id:
        return None
    if location_id not in cls._cached_locations:
        location = location_id[0]
        branch = location_id[1]
        try:
            cls._cached_locations[location_id] = cuppa.location.Location( env, location, branch )
        except cuppa.location.LocationException as error:
            logger.error( "Could not get location for [{}] at [{}] with branch [{}]. Failed with error [{}]".format(
                    as_notice( cls._name.title() ), as_notice( str(location) ),
                    as_notice( str(branch) ), as_error( error ) ) )
            return None
    return cls._cached_locations[location_id]
def _run_gcovr(self, target, build_dir, output_dir, working_dir, sconscript_id, include_regexes, exclude_regexes):
    cuppa.path.lazy_create_path(output_dir)

    command = 'gcovr -h'
    if not command_available(command):
        logger.warning("Skipping gcovr output as not available")
        return

    html_base_name = url_coverage_base_name(sconscript_id) + "." + self._program_id[2:]
    index_file = html_base_name + ".html"

    regex_filter = re.escape(os.path.join(build_dir, "")).replace("\_", "_").replace("\#", "#")
    regex_filter = ".*" + regex_filter + ".*" + self._program_id + "\.gcov"

    gcov_includes = ""
    for include_regex in include_regexes:
        gcov_includes += ' --gcov-filter="{}"'.format(include_regex)
    if not gcov_includes:
        gcov_includes = ' --gcov-filter="{}"'.format(regex_filter)

    gcov_excludes = ""
    for exclude_regex in exclude_regexes:
        gcov_excludes += ' --gcov-exclude="{}"'.format(exclude_regex)

    command = 'gcovr -g {gcov_includes} {gcov_excludes} -s -k -r . --html --html-details -o {index_file}'.format(
        gcov_includes=gcov_includes,
        gcov_excludes=gcov_excludes,
        index_file=index_file)

    return_code, output = run_command(command, working_dir, self._scons_env)

    coverage_index_basename = "coverage" + self._url_program_id + ".html"
    new_index_file = os.path.join(output_dir, coverage_index_basename)
    try:
        os.rename(index_file, new_index_file)
    except OSError as e:
        logger.error(
            "Failed moving coverage file from [{}] to [{}] with error: {}".format(
                as_notice(index_file), as_notice(new_index_file), as_error(str(e))))

    coverage_summary_path = os.path.splitext(new_index_file)[0] + ".log"
    with open(coverage_summary_path, 'w') as coverage_summary_file:
        coverage_summary_file.write(coverage_index_basename + "\n" + output)

    logger.trace("gcovr HTML file filter = [{}]".format(as_notice(html_base_name)))
    coverage_files = Glob(html_base_name + '*.html')
    for coverage_file in coverage_files:
        new_coverage_file = os.path.join(output_dir, str(coverage_file))
        target.append(new_coverage_file)
        try:
            os.rename(str(coverage_file), new_coverage_file)
        except OSError as e:
            logger.error(
                "Failed moving coverage file from [{}] to [{}] with error: {}".format(
                    as_notice(str(coverage_file)), as_notice(new_coverage_file), as_error(str(e))))

    coverage_filter_path = os.path.join(output_dir, "coverage" + self._url_program_id + ".cov_filter")
    with open(coverage_filter_path, 'w') as coverage_filter_file:
        coverage_filter_file.write(html_base_name + '*.html')

    sys.stdout.write(output + "\n")
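# --- Illustrative sketch (not part of the original source) ---
# The filter construction above: re.escape() makes a literal build path safe
# inside the regex handed to gcovr; Pythons before 3.7 also escape '#', which
# is why the code unescapes it again. Example path only.
import os
import re

build_dir = os.path.join("build", "gcc", "debug")
regex_filter = ".*" + re.escape(os.path.join(build_dir, "")) + ".*" + r"\.gcov"
print(regex_filter)            # matches e.g. .*build/gcc/debug/.*\.gcov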