def _lazy_update_library_list(env, emitting, libraries, prebuilt_libraries, add_dependents, linktype, boost, stage_dir):

    def build_with_library_name(library):
        return library == 'log_setup' and 'log' or library

    if add_dependents:
        if not emitting:
            libraries = set(
                build_with_library_name(l)
                for l in add_dependent_libraries(boost, linktype, libraries))
        else:
            libraries = add_dependent_libraries(boost, linktype, libraries)

    if stage_dir not in prebuilt_libraries:
        logger.trace("Lazy update libraries list for [{}] to [{}]".format(
            as_info(stage_dir), colour_items(str(l) for l in libraries)))
        prebuilt_libraries[stage_dir] = set(libraries)
    else:
        logger.trace(
            "Lazy read libraries list for [{}]: libraries are [{}]".format(
                as_info(stage_dir), colour_items(str(l) for l in libraries)))
        libraries = [l for l in libraries if l not in prebuilt_libraries[stage_dir]]
        prebuilt_libraries[stage_dir].update(libraries)

    return libraries

def get_option( cls, option, default=None ):

    if option in cls._cached_options:
        return cls._cached_options[ option ]

    value = SCons.Script.GetOption( option )
    source = None
    if value is None or value == '':
        if cls._options['default_options'] and option in cls._options['default_options']:
            value = cls._options['default_options'][ option ]
            source = "in the sconstruct file"
        elif default:
            value = default
            source = "using default"
    else:
        source = "on command-line"

    if option in cls._options['configured_options']:
        source = "using configure"

    if value:
        logger.debug( "option [{}] set {} as [{}]".format( as_info( option ), source, as_info( str(value) ) ) )

    cls._cached_options[option] = value
    return value

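# --- Illustrative sketch (not part of the original source): the option-resolution
# precedence implemented by get_option() above, rewritten standalone so it can be
# run without SCons. The names `command_line`, `sconstruct_defaults` and
# `configured` are hypothetical stand-ins for the SCons/cuppa state.

def resolve_option(option, command_line, sconstruct_defaults, configured, default=None):
    """Mirror get_option(): the command line wins, then sconstruct defaults,
    then the caller-supplied default; 'configure' only relabels the source."""
    value = command_line.get(option)
    source = None
    if value is None or value == '':
        if option in sconstruct_defaults:
            value = sconstruct_defaults[option]
            source = "in the sconstruct file"
        elif default is not None:
            value = default
            source = "using default"
    else:
        source = "on command-line"
    if option in configured:
        source = "using configure"
    return value, source

assert resolve_option('jobs', {'jobs': '8'}, {'jobs': '4'}, {}) == ('8', "on command-line")
assert resolve_option('jobs', {}, {'jobs': '4'}, {}) == ('4', "in the sconstruct file")
assert resolve_option('jobs', {}, {}, {}, default='2') == ('2', "using default")
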
def _determine_latest_boost_verion(offline):
    current_release = "1.71.0"
    if not offline:
        try:
            boost_version_url = 'https://www.boost.org/users/download/'
            logger.info("Checking current boost version from {}...".format(
                as_info(boost_version_url)))
            html = lxml.html.parse(urlopen(boost_version_url))
            current_release = html.xpath(
                "/html/body/div[2]/div/div[1]/div/div/div[2]/h3[1]/span")[0].text
            current_release = str(
                re.search(r'(\d[.]\d+([.]\d+)?)', current_release).group(1))
            logger.info("Latest boost release detected as [{}]".format(
                as_info(current_release)))
        except Exception as e:
            logger.warn(
                "Cannot determine latest version of boost - [{}]. Assuming [{}].".format(
                    str(e), current_release))
    else:
        logger.info(
            "In offline mode. No version of boost specified so assuming [{}]".format(
                as_info(current_release)))
    return current_release

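# --- Illustrative sketch (not in the original): the regex above extracts a
# dotted version number from the heading text scraped from boost.org, e.g.
# "Version 1.71.0" -> "1.71.0". Runnable standalone (the heading string is
# hypothetical):
import re

heading = "Version 1.71.0"
assert re.search(r'(\d[.]\d+([.]\d+)?)', heading).group(1) == "1.71.0"
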
def build_library_from_source( self, env, sources=None, library_name=None, linktype=None ):

    from SCons.Script import Flatten

    if not self._source_path and not sources:
        logger.warn( "Attempting to build library when source path is None" )
        return None

    if not library_name:
        library_name = self._name

    if not linktype:
        linktype = self._linktype

    variant_key = env['tool_variant_dir']

    prebuilt_objects   = self.lazy_create_node( variant_key, self._prebuilt_objects )
    prebuilt_libraries = self.lazy_create_node( variant_key, self._prebuilt_libraries )

    local_dir = self._location.local()
    local_folder = self._location.local_folder()

    build_dir = os.path.abspath( os.path.join( env['abs_build_root'], local_folder, env['tool_variant_working_dir'] ) )
    final_dir = os.path.abspath( os.path.normpath( os.path.join( build_dir, env['final_dir'] ) ) )

    logger.debug( "build_dir for [{}] = [{}]".format( as_info(self._name), build_dir ) )
    logger.debug( "final_dir for [{}] = [{}]".format( as_info(self._name), final_dir ) )

    obj_suffix  = env['OBJSUFFIX']
    obj_builder = env.StaticObject
    lib_builder = env.BuildStaticLib

    if linktype == "shared":
        obj_suffix  = env['SHOBJSUFFIX']
        obj_builder = env.SharedObject
        lib_builder = env.BuildSharedLib

    if not sources:
        sources = env.RecursiveGlob( "*.cpp", start=self._source_path, exclude_dirs=[ env['build_dir'] ] )
        sources.extend( env.RecursiveGlob( "*.cc", start=self._source_path, exclude_dirs=[ env['build_dir'] ] ) )
        sources.extend( env.RecursiveGlob( "*.c", start=self._source_path, exclude_dirs=[ env['build_dir'] ] ) )

    objects = []
    for source in Flatten( [sources] ):
        rel_path = os.path.relpath( str(source), local_dir )
        rel_obj_path = os.path.splitext( rel_path )[0] + obj_suffix
        obj_path = os.path.join( build_dir, rel_obj_path )
        if rel_obj_path not in prebuilt_objects:
            prebuilt_objects[rel_obj_path] = obj_builder( obj_path, source )
        objects.append( prebuilt_objects[rel_obj_path] )

    if linktype not in prebuilt_libraries:
        library = lib_builder( library_name, objects, final_dir = final_dir )
        if linktype == "shared":
            library = env.Install( env['abs_final_dir'], library )
        prebuilt_libraries[linktype] = library
    else:
        logger.trace( "using existing library = [{}]".format( str(prebuilt_libraries[linktype]) ) )

    return prebuilt_libraries[linktype]

def get_cached_archive( self, cache_root, path ):
    logger.debug( "Checking for cached archive [{}]...".format( as_info( path ) ) )
    for archive in os.listdir( cache_root ):
        if fnmatch.fnmatch( archive, path ):
            logger.debug( "Found cached archive [{}] skipping download".format( as_info( archive ) ) )
            return os.path.join( cache_root, archive )
    return None

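# --- Illustrative sketch (not in the original): get_cached_archive() treats the
# stored `path` as an fnmatch pattern, so cached names can match with wildcards.
# The archive names are hypothetical:
import fnmatch

assert fnmatch.fnmatch("boost_1_71_0.tar.gz", "boost_1_71_0.*")
assert not fnmatch.fnmatch("boost_1_70_0.tar.gz", "boost_1_71_0.*")
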
def get_branch(cls, path):
    branch = None
    try:
        result = cls.execute_command(
            "{git} symbolic-ref HEAD".format(git=cls.binary()), path)
        branch = result.replace("refs/heads/", "").strip()
        logger.trace("Branch (using symbolic-ref) for [{}] is [{}]".format(
            as_notice(path), as_info(branch)))
        return branch
    except cls.Error:
        pass

    # In case we have a detached head we can fall back to this
    result = cls.execute_command(
        "{git} show -s --pretty=\%d HEAD".format(git=cls.binary()), path)
    match = re.search(r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result)

    if match:
        branches = [b.strip() for b in match.group("branches").split(',')]
        logger.trace("Branches (using show) for [{}] are [{}]".format(
            as_notice(path), colour_items(branches)))
        if len(branches) == 1:
            # If this returns "tag: tag_name", replace the ": " with "/" and then extract the
            # tag_name; otherwise this will simply extract the branch_name as expected
            branch = branches[0].replace(': ', '/').split('/')[1]
        else:
            branch = branches[-2].split('/')[1]
        logger.trace("Branch (using show) for [{}] is [{}]".format(
            as_notice(path), as_info(branch)))
    else:
        logger.warn("No branch found from [{}]".format(result))

    return branch

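# --- Illustrative sketch (not in the original): how the fallback regex above
# digests `git show -s --pretty=%d HEAD` output. The sample strings are
# hypothetical but follow git's decoration format.
import re

def branch_from_decoration(result):
    match = re.search(r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result)
    if not match:
        return None
    branches = [b.strip() for b in match.group("branches").split(',')]
    if len(branches) == 1:
        # "tag: 1.2.3" becomes "tag/1.2.3" so the same split()[1] works for tags
        return branches[0].replace(': ', '/').split('/')[1]
    return branches[-2].split('/')[1]

assert branch_from_decoration(" (HEAD -> master, origin/master)") == "master"
assert branch_from_decoration(" (HEAD, tag: 1.2.3)") == "1.2.3"
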
def __init__( self, cuppa_env, location, branch=None, extra_sub_path=None, name_hint=None ):

    self._location = location
    self._full_url = urlparse.urlparse( location )
    self._sub_dir = ""
    self._name_hint = name_hint

    if extra_sub_path:
        if os.path.isabs( extra_sub_path ):
            raise LocationException( "Error: extra sub path [{}] is not relative".format( extra_sub_path ) )
        else:
            self._sub_dir = os.path.normpath( extra_sub_path )

    ## Get the location for the source dependency. If the location is a URL or an Archive we'll need to
    ## retrieve the URL and extract the archive. get_local_directory() returns the location of the source
    ## once this is done.
    local_directory, use_sub_dir = self.get_local_directory( cuppa_env, location, self._sub_dir, branch, self._full_url )

    self._base_local_directory = local_directory
    self._local_directory = use_sub_dir and os.path.join( local_directory, self._sub_dir ) or local_directory

    ## Now that we have a locally accessible version of the dependency we can try to collate some information
    ## about it to allow us to specify what we are building with.
    self._url, self._repository, self._branch, self._revision = self.get_info( self._location, self._local_directory, self._full_url )
    self._version, self._revision = self.ver_rev_summary( self._branch, self._revision, self._full_url.path )

    logger.debug( "Using [{}]{} at [{}] stored in [{}]".format(
            as_info( location ),
            ( branch and ":[{}]".format( as_info( str(branch) ) ) or "" ),
            as_info( self._version ),
            as_notice( self._local_directory ) ) )

def __call__( self, target, source, env ):

    logger.trace( "target = [{}]".format( colour_items( [ str(node) for node in target ] ) ) )
    logger.trace( "source = [{}]".format( colour_items( [ str(node) for node in source ] ) ) )

    for html_report_src_tgt, json_report_src_tgt in zip( *[iter(itertools.izip( source, target ))]*2 ):

        html_report = html_report_src_tgt[0]
        json_report = json_report_src_tgt[0]

        html_target = html_report_src_tgt[1]
        json_target = json_report_src_tgt[1]

        logger.trace( "html_report = [{}]".format( as_notice( str(html_report) ) ) )
        logger.trace( "json_report = [{}]".format( as_info( str(json_report) ) ) )
        logger.trace( "html_target = [{}]".format( as_notice( str(html_target) ) ) )
        logger.trace( "json_target = [{}]".format( as_info( str(json_target) ) ) )

        destination = env['abs_final_dir']
        if self._destination:
            destination = self._destination + destination_subdir( env )

        logger.trace( "report_summary = {}".format( str( self._read( str(json_report) ) ) ) )

        env.Execute( Copy( html_target, html_report ) )
        env.Execute( Copy( json_target, json_report ) )

    return None

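# --- Illustrative sketch (not in the original): the `zip(*[iter(...)]*2)` idiom
# above consumes the (source, target) pairs two at a time, yielding
# ((html_report, html_target), (json_report, json_target)) chunks. A Python 3
# equivalent of the izip-based loop, with hypothetical node names:
pairs = [("a.html", "A.html"), ("a.json", "A.json"),
         ("b.html", "B.html"), ("b.json", "B.json")]
chunks = list(zip(*[iter(pairs)] * 2))
assert chunks == [(("a.html", "A.html"), ("a.json", "A.json")),
                  (("b.html", "B.html"), ("b.json", "B.json"))]
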
def GenerateHtmlTestReport( self, target, source, env ):

    self._base_uri = ""
    if self._auto_link_tests:
        self._base_uri = initialise_test_linking( env, link_style=self._link_style )

    # Each source will result in one or more targets so we need to slice the targets to pick up
    # the gcov target (the first one) before we perform the zip iteration
    #for s, t in itertools.izip( source, target ):
    for s, t in itertools.izip( source, itertools.islice( target, 0, None, len(target)/len(source) ) ):

        test_suites = {}

        logger.trace( "source = [{}]".format( as_info(str(s)) ) )
        logger.trace( "target = [{}]".format( as_info(str(t)) ) )

        test_cases = self._read( s.abspath )
        for test_case in test_cases:
            if 'assertions_count' not in test_case:
                if 'assertions' in test_case:
                    test_case['assertions_count']   = test_case['assertions']
                    test_case['assertions_passed']  = test_case['passed']
                    test_case['assertions_failed']  = test_case['failed']
                    test_case['assertions_aborted'] = test_case['aborted']
                else:
                    test_case['assertions_count']   = 0
                    test_case['assertions_passed']  = 0
                    test_case['assertions_failed']  = 0
                    test_case['assertions_aborted'] = 0

            self._add_to_test_suites( test_suites, test_case )

        self._write( str(t), env, test_suites, sort_test_cases=self._sort_test_cases )

    return None

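# --- Illustrative sketch (not in the original): the islice() stride above picks
# the first target of each per-source group. With 2 sources and 6 targets the
# step is 3, so targets 0 and 3 are paired with the sources. Note the integer
# division (`//`): the original `/` is integral under Python 2. Hypothetical
# node names:
import itertools

sources = ["s1", "s2"]
targets = ["t1a", "t1b", "t1c", "t2a", "t2b", "t2c"]
step = len(targets) // len(sources)
picked = list(zip(sources, itertools.islice(targets, 0, None, step)))
assert picked == [("s1", "t1a"), ("s2", "t2a")]
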
def get_local_directory_for_non_url( self, location, sub_dir, branch_path, base ):

    if pip_is_archive_file( location ):

        self._local_folder = self.folder_name_from_path( location )
        local_directory = os.path.join( base, self._local_folder )

        local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

        if os.path.exists( local_dir_with_sub_dir ):
            try:
                os.rmdir( local_dir_with_sub_dir )
            except OSError:
                # The directory is not empty so the archive is already extracted
                return local_directory

        self.extract( location, local_dir_with_sub_dir )
        logger.debug( "(local archive) Location = [{}]".format( as_info( location ) ) )
        logger.debug( "(local archive) Local folder = [{}]".format( as_info( self._local_folder ) ) )

    else:
        local_directory = branch_path and os.path.join( location, branch_path ) or location
        self._local_folder = self.folder_name_from_path( location )

        logger.debug( "(local file) Location = [{}]".format( as_info( location ) ) )
        logger.debug( "(local file) Local folder = [{}]".format( as_info( self._local_folder ) ) )

    return local_directory

def load( self ):

    self._show      = self._env.get_option( 'show_conf' )
    self._save      = self._env.get_option( 'save_conf' )
    self._remove    = self._env.get_option( 'remove_settings' )
    self._update    = self._env.get_option( 'update_conf' )
    self._clear     = self._env.get_option( 'clear_conf' )
    self._configure = self._save or self._remove or self._update
    self._clean     = self._env.get_option( 'clean' )

    self._unconfigure = ( self._save and self._clean ) or self._clear

    if self._unconfigure:
        self._configure = False
        logger.info( "{}".format( as_notice( "Clear configuration requested..." ) ) )
        if os.path.exists( self._conf_path ):
            logger.info( "Removing configure file [{}]".format( as_info( self._conf_path ) ) )
            os.remove( self._conf_path )
        else:
            logger.info( "Configure file [{}] does not exist. Unconfigure not needed".format( as_info( self._conf_path ) ) )
        return
    elif self._configure:
        print
        logger.info( "{}".format( as_notice( "Update configuration requested..." ) ) )

    if not self._save:
        self._loaded_options = self._load_conf()
    else:
        self._loaded_options = {}

    self._env['configured_options'] = self._loaded_options
    self._env['default_options'].update( self._loaded_options )

def obtain_from_repository( self, location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend ):

    rev_options = self.get_rev_options( vc_type, vcs_backend )

    action = "Cloning"
    if vc_type == "svn":
        action = "Checking out"

    max_attempts = 2
    attempt = 1
    while attempt <= max_attempts:
        logger.info( "{} [{}] into [{}]{}".format(
                action,
                as_info( location ),
                as_info( local_dir_with_sub_dir ),
                attempt > 1 and "(attempt {})".format( str(attempt) ) or "" ) )
        try:
            obtain( vcs_backend, local_dir_with_sub_dir, vcs_backend.url )
            logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
            break
        except pip_exceptions.PipError as error:
            attempt = attempt + 1
            log_as = logger.warn
            if attempt > max_attempts:
                log_as = logger.error

            log_as( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                    as_info( location ),
                    as_notice( local_dir_with_sub_dir ),
                    ( rev_options and " to {}".format( as_notice( str(rev_options) ) ) or "" ),
                    as_error( str(error) ) ) )

            if attempt > max_attempts:
                raise LocationException( str(error) )

def apply_patch_if_needed(home, version_string):
    patch_applied_path = os.path.join(home, "cuppa_test_patch_applied.txt")
    expected_diff_file = os.path.join(
        os.path.split(__file__)[0],
        "boost_test_patch_{}.diff".format(version_string))
    available_diff_files = sorted(glob.glob(
        os.path.join(os.path.split(__file__)[0], "boost_test_patch_*.diff")),
        reverse=True)

    for diff_file in available_diff_files:
        if diff_file <= expected_diff_file:
            break

    logger.debug("Using diff file [{}]".format(as_info(diff_file)))

    if os.path.exists(patch_applied_path):
        logger.debug("[{}] already applied".format(as_info(diff_file)))
        return

    command = "patch --batch -p1 --input={}".format(diff_file)
    logger.info("Applying [{}] using [{}] in [{}]".format(
        as_info(diff_file), as_info(command), as_info(home)))

    if subprocess.call(shlex.split(command), cwd=home) != 0:
        logger.error("Could not apply [{}]".format(diff_file))
    else:
        with open(patch_applied_path, "w") as patch_applied_file:
            pass

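# --- Illustrative sketch (not in the original): the loop above relies on
# reverse-sorted lexicographic order to pick the newest patch that is not newer
# than the requested version. Hypothetical file names:
available = sorted(["boost_test_patch_1.65.0.diff",
                    "boost_test_patch_1.68.0.diff",
                    "boost_test_patch_1.70.0.diff"], reverse=True)
expected = "boost_test_patch_1.69.0.diff"
for diff_file in available:
    if diff_file <= expected:
        break
assert diff_file == "boost_test_patch_1.68.0.diff"
# Caveat: string comparison only orders correctly while the version fields have
# equal width; for example "1.9" compares greater than "1.10".
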
def _clear_config( self, conf_path ):
    if os.path.exists( conf_path ):
        logger.info( "Removing configure file [{}]".format( as_info( conf_path ) ) )
        os.remove( conf_path )
    else:
        logger.info( "Configure file [{}] does not exist. Unconfigure not needed".format( as_info( conf_path ) ) )

def _get_boost_location( cls, env, location, version, base, patched ):
    logger.debug( "Identify boost using location = [{}], version = [{}], base = [{}], patched = [{}]".format(
            as_info( str(location) ),
            as_info( str(version) ),
            as_info( str(base) ),
            as_info( str(patched) ) ) )

    boost_home = None
    boost_location = None

    extra_sub_path = 'clean'
    if patched:
        extra_sub_path = 'patched'

    if location:
        location = cls.location_from_boost_version( location )

        if not location: # use version as a fallback in case both are specified
            location = cls.location_from_boost_version( version )
        boost_location = cuppa.location.Location( env, location, extra_sub_path=extra_sub_path, name_hint="boost" )

    elif base: # Find boost locally
        if not os.path.isabs( base ):
            base = os.path.abspath( base )

        if not version:
            boost_home = base
        elif version:
            search_list = [
                os.path.join( base, 'boost', version, 'source' ),
                os.path.join( base, 'boost', 'boost_' + version ),
                os.path.join( base, 'boost', version ),
                os.path.join( base, 'boost_' + version ),
            ]

            def exists_in( locations ):
                for location in locations:
                    home = cls._home_from_path( location )
                    if home:
                        return home
                return None

            boost_home = exists_in( search_list )
            if not boost_home:
                raise BoostException( "Cannot construct Boost Object. Home for Version [{}] cannot be found. Searched in [{}]".format( version, str( [l for l in search_list] ) ) )
        else:
            raise BoostException( "Cannot construct Boost Object. No Home or Version specified" )

        logger.debug( "Using boost found at [{}]".format( as_info( boost_home ) ) )
        boost_location = cuppa.location.Location( env, boost_home, extra_sub_path=extra_sub_path )
    else:
        location = cls.location_from_boost_version( version )
        boost_location = cuppa.location.Location( env, location, extra_sub_path=extra_sub_path )

    if patched:
        cls.apply_patch_if_needed( boost_location.local() )

    return boost_location

def add_to_env( cls, env, add_toolchain, add_to_supported ):
    for version in cls.supported_versions():
        add_to_supported( version )

    for version, gcc in cls.available_versions().iteritems():
        logger.debug( "Adding toolchain [{}] reported as [{}] with cxx_version [g++{}] at [{}]".format(
                as_info(version), as_info(gcc['version']), as_info(gcc['cxx_version']), as_notice(gcc['path']) ) )
        add_toolchain( version, cls( version, gcc['cxx_version'], gcc['version'], gcc['path'] ) )

def _command_line_from_settings(self, settings):
    commands = []
    for key, value in six.iteritems(settings):
        command = as_emphasised("--" + key)
        if value != True and value != False:
            if not isinstance(value, list):
                command += "=" + as_info(str(value))
            else:
                command += "=" + as_info(",".join(value))
        commands.append(command)
    commands.sort()
    return " ".join(commands)

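# --- Illustrative sketch (not in the original): what _command_line_from_settings()
# produces, rewritten standalone. The colourising helpers are stubbed out as
# identity functions here; in cuppa they only add terminal colour codes.
def as_emphasised(text): return text  # stub for illustration
def as_info(text): return text        # stub for illustration

def command_line_from_settings(settings):
    commands = []
    for key, value in settings.items():
        command = as_emphasised("--" + key)
        if value != True and value != False:
            if not isinstance(value, list):
                command += "=" + as_info(str(value))
            else:
                command += "=" + as_info(",".join(value))
        commands.append(command)
    commands.sort()
    return " ".join(commands)

assert command_line_from_settings(
    {"toolchains": ["gcc", "clang"], "verbosity": "debug", "cover": True}
) == "--cover --toolchains=gcc,clang --verbosity=debug"
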
def _command_line_from_settings( self, settings ):
    commands = []
    for key, value in settings.iteritems():
        command = as_emphasised( "--" + key )
        if value != True and value != False:
            if not isinstance( value, list ):
                command += "=" + as_info( str(value) )
            else:
                command += "=" + as_info( ",".join( value ) )
        commands.append( command )
    commands.sort()
    return " ".join( commands )

def get_local_directory_for_download_url(self, location, sub_dir, local_directory):
    logger.debug("[{}] is an archive download".format(as_info(location)))

    local_dir_with_sub_dir = os.path.join(local_directory, sub_dir and sub_dir or "")

    # First we check to see if we already downloaded and extracted this archive before
    if os.path.exists(local_dir_with_sub_dir):
        try:
            # If not empty this will fail
            os.rmdir(local_dir_with_sub_dir)
        except OSError:
            # Not empty so we'll return this as the local_directory
            logger.debug("(already present) Location = [{}]".format(as_info(location)))
            logger.debug("(already present) Local folder = [{}]".format(
                as_info(str(self._local_folder))))
            return local_directory

    if self._cuppa_env['dump'] or self._cuppa_env['clean']:
        return local_directory

    # If not we then check to see if we cached the download
    cached_archive = self.get_cached_archive(self._cuppa_env['cache_root'], self._local_folder)
    if cached_archive:
        logger.debug("Cached archive [{}] found for [{}]".format(
            as_info(cached_archive), as_info(location)))
        self.extract(cached_archive, local_dir_with_sub_dir)
    else:
        logger.info("Downloading [{}]...".format(as_info(location)))
        try:
            report_hook = None
            if logger.isEnabledFor(logging.INFO):
                report_hook = ReportDownloadProgress()
            filename, headers = urlretrieve(location, reporthook=report_hook)
            name, extension = os.path.splitext(filename)
            logger.info("[{}] successfully downloaded to [{}]".format(
                as_info(location), as_info(filename)))
            self.extract(filename, local_dir_with_sub_dir)
            if self._cuppa_env['cache_root']:
                cached_archive = os.path.join(self._cuppa_env['cache_root'], self._local_folder)
                logger.debug("Caching downloaded file as [{}]".format(as_info(cached_archive)))
                shutil.copyfile(filename, cached_archive)
        except ContentTooShortError as error:
            logger.error("Download of [{}] failed with error [{}]".format(
                as_error(location), as_error(str(error))))
            raise LocationException(error)

    return local_directory

def log_exception( error, suppress=None ):
    from cuppa.log import logger
    from cuppa.colourise import as_info

    if not suppress:
        logger.fatal( "Cuppa terminated by exception [{}: {}]".format(
                as_info( error.__class__.__name__ ),
                as_info( str(error) ) ) )
        if not logger.isEnabledFor( logging.EXCEPTION ):
            logger.warn( "Use {} (or above) to see the stack".format( as_info( "--verbosity=exception" ) ) )

    logger.exception( traceback.format_exc() )

def __call__( self, blocks_transferred, block_size, total_size ):
    percent = 100.0 * float(blocks_transferred) * float(block_size) / float(total_size)
    if percent >= self._expected:
        if percent >= 100.0:
            sys.stdout.write( "={} Complete\n".format( as_info("|") ) )
            sys.stdout.flush()
        else:
            sys.stdout.write( "=" )
            if percent >= float(self._report_percent):
                sys.stdout.write( as_info( str(self._report_percent) + "%" ) )
                self._report_percent += self._percent_step
            sys.stdout.flush()
        self._expected += self._step

def __call_classmethod_for_classes_in_module(package, name, path, method, *args, **kwargs):
    try:
        filehandle, pathname, description = imp.find_module(
            name, path and [path] or None)
        try:
            try:
                qualified_name = package and package + "." + name or name
                module = sys.modules[qualified_name]
            except KeyError as error:
                module = imp.load_module(name, filehandle, pathname, description)

            for member_name in dir(module):
                member = getattr(module, member_name)
                if inspect.ismodule(member):
                    if package:
                        parent_package = package + "." + name
                    else:
                        parent_package = name
                    __call_classmethod_for_classes_in_module(
                        parent_package, member_name, pathname, method, *args, **kwargs)
                elif inspect.isclass(member):
                    try:
                        function = getattr(member, method)
                        if callable(function):
                            try:
                                function(*args, **kwargs)
                            except Exception as error:
                                if logger.isEnabledFor(logging.EXCEPTION):
                                    logger.error(
                                        "[{}] in [{}] failed with error [{}]".format(
                                            as_info(str(method)),
                                            as_notice(str(member)),
                                            as_info(str(error))))
                                    traceback.print_exc()
                                raise error
                    except AttributeError as ignore:
                        pass
        finally:
            if filehandle:
                filehandle.close()
    except ImportError as error:
        pass

def remote_default_branch(cls, repository):
    command = "{git} ls-remote --symref {repository} HEAD".format(
        git=cls.binary(), repository=repository)
    result = cls.execute_command(command)

    if result:
        branch_pattern = r'ref[:]\s+refs/heads/(?P<default_branch>[^\s]+)\s+HEAD'
        match = re.search(branch_pattern, result)
        logger.trace(
            "When searching for the default branch name for repository [{}] using regex [{}] the following match [{}] was returned".format(
                as_info(repository), as_notice(branch_pattern), as_info(str(match))))
        if match:
            return match.group('default_branch')
    return None

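# --- Illustrative sketch (not in the original): the ls-remote pattern above
# against sample `git ls-remote --symref <repo> HEAD` output (the hash is
# hypothetical):
import re

output = ("ref: refs/heads/main\tHEAD\n"
          "46e5a88e1e0d6f51f3d5d1110d7968e4e9e6e2b3\tHEAD\n")
match = re.search(r'ref[:]\s+refs/heads/(?P<default_branch>[^\s]+)\s+HEAD', output)
assert match and match.group('default_branch') == "main"
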
def _lazy_update_library_list( env, emitting, libraries, prebuilt_libraries, add_dependents, linktype, boost, stage_dir ):

    def build_with_library_name( library ):
        if library == 'log_setup':
            return 'log'
        elif library in { 'prg_exec_monitor', 'test_exec_monitor', 'unit_test_framework' }:
            return 'test'
        else:
            return library

    if add_dependents:
        if not emitting:
            libraries = set( build_with_library_name(l) for l in add_dependent_libraries( boost, linktype, libraries ) )
        else:
            libraries = add_dependent_libraries( boost, linktype, libraries )

    if stage_dir not in prebuilt_libraries:
        logger.trace( "Lazy update libraries list for [{}] to [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        prebuilt_libraries[ stage_dir ] = set( libraries )
    else:
        logger.trace( "Lazy read libraries list for [{}]: libraries are [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        libraries = [ l for l in libraries if l not in prebuilt_libraries[ stage_dir ] ]
        prebuilt_libraries[ stage_dir ].update( libraries )

    return libraries

def get_local_directory( self, location, sub_dir, branch_path, full_url ):

    logger.debug( "Determine local directory for [{location}] when {offline}".format(
            location=as_info( location ),
            offline=self._offline and as_info_label("OFFLINE") or "online" ) )

    local_directory = None

    base = self._cuppa_env['download_root']
    if not os.path.isabs( base ):
        base = os.path.join( self._cuppa_env['working_dir'], base )

    if location.startswith( 'file:' ):
        location = pip_download.url_to_path( location )

    if not pip_is_url( location ):
        return self.get_local_directory_for_non_url( location, sub_dir, branch_path, base )
    else:
        self._local_folder = self.folder_name_from_path( full_url )
        local_directory = os.path.join( base, self._local_folder )

        if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
            return self.get_local_directory_for_download_url( location, sub_dir, local_directory )
        elif '+' in full_url.scheme:
            return self.get_local_directory_for_repository( location, sub_dir, full_url, local_directory )

        return local_directory

def __init__( self ):
    self._step = 2.5
    self._percent_step = 10
    self._report_percent = self._percent_step
    self._expected = self._step
    sys.stdout.write( "cuppa: location: [info] Download progress {}".format( as_info("|") ) )
    sys.stdout.flush()

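# --- Illustrative sketch (not in the original): ReportDownloadProgress (the
# __init__ above plus the __call__ defined earlier) follows the urlretrieve
# reporthook protocol, so wiring it up would look like this (URL and import
# path hypothetical; cuppa imports urlretrieve via its own compatibility shim):
#
#   from six.moves.urllib.request import urlretrieve
#   filename, headers = urlretrieve("https://example.com/archive.tar.gz",
#                                   reporthook=ReportDownloadProgress())
#
# urlretrieve calls the hook as hook(blocks_transferred, block_size, total_size)
# after each chunk, which drives the "=...10%...|" progress bar on stdout.
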
def get_branch(cls, path):
    branch = None
    remote = None

    # In case we have a detached head we use this
    result = as_str(cls.execute_command(
        "{git} show -s --pretty=\%d HEAD".format(git=cls.binary()), path))
    match = re.search(r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result)

    if match:
        branches = [b.strip() for b in match.group("branches").split(',')]
        logger.trace("Branches (using show) for [{}] are [{}]".format(
            as_notice(path), colour_items(branches)))
        if len(branches) == 1:
            # If this returns "tag: tag_name", replace the ": " with "/" and then extract the
            # tag_name; otherwise this will simply extract the branch_name as expected
            if not branches[0].startswith('tag:'):
                remote = branches[0]
            branch = branches[0].replace(': ', '/').split('/')[1]
        else:
            remote = branches[-2]
            branch = remote.split('/')[1]
        logger.trace("Branch (using show) for [{}] is [{}]".format(
            as_notice(path), as_info(branch)))
    else:
        logger.warn("No branch found from [{}]".format(result))

    return branch, remote

def remove_common_top_directory_under( self, path ):
    dirs = os.listdir( path )
    if not dirs:
        raise LocationException( "Uncompressed archive [{}] is empty".format( path ) )
    top_dir = os.path.join( path, dirs[0] )
    if len(dirs) == 1 and os.path.isdir( top_dir ):
        logger.debug( "Removing redundant top directory [{}] from [{}]".format(
                as_info( dirs[0] ),
                as_info( path ) ) )
        # we have a single top-level directory
        move_dirs = os.listdir( top_dir )
        for d in move_dirs:
            shutil.move( os.path.join( top_dir, d ), os.path.join( path, d ) )
        shutil.rmtree( top_dir )
        return True
    return False

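# --- Illustrative sketch (not in the original): what the flattening above does.
# An archive that unpacks to a single wrapper directory gets hoisted one level:
#
#   before: path/pkg-1.0/src, path/pkg-1.0/README
#   after:  path/src, path/README            (returns True, wrapper removed)
#
# A runnable standalone equivalent using a temporary directory:
import os, shutil, tempfile

def flatten_single_top_dir(path):
    dirs = os.listdir(path)
    if len(dirs) != 1 or not os.path.isdir(os.path.join(path, dirs[0])):
        return False
    top_dir = os.path.join(path, dirs[0])
    for d in os.listdir(top_dir):
        shutil.move(os.path.join(top_dir, d), os.path.join(path, d))
    shutil.rmtree(top_dir)
    return True

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, "pkg-1.0", "src"))
open(os.path.join(root, "pkg-1.0", "README"), "w").close()
assert flatten_single_top_dir(root)
assert sorted(os.listdir(root)) == ["README", "src"]
shutil.rmtree(root)
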
def summary_name(cls, filename, toolchain_variant_dir, offset_dir, sconscript_name):
    name = os.path.splitext(filename)[0]
    if name.startswith(coverage_index_marker):
        name = name.replace(coverage_index_marker, "")
    logger.trace(
        "filename = [{}], toolchain_variant_dir = [{}], offset_dir = [{}], sconscript_name = [{}]".format(
            as_info(filename),
            as_notice(toolchain_variant_dir),
            as_info(offset_dir),
            as_info(sconscript_name),
        ))
    return "./{}/{}".format(offset_dir, sconscript_name and sconscript_name or "*")

def remove_common_top_directory_under(cls, path):
    dirs = os.listdir(path)
    if not dirs:
        raise LocationException(
            "Uncompressed archive [{}] is empty".format(path))
    top_dir = os.path.join(path, dirs[0])
    if len(dirs) == 1 and os.path.isdir(top_dir):
        logger.debug(
            "Removing redundant top directory [{}] from [{}]".format(
                as_info(dirs[0]), as_info(path)))
        # we have a single top-level directory
        move_dirs = os.listdir(top_dir)
        for d in move_dirs:
            shutil.move(os.path.join(top_dir, d), os.path.join(path, d))
        shutil.rmtree(top_dir)
        return True
    return False

def get_rev_options(self, vc_type, vcs_backend, local_remote=None):
    url, rev = get_url_rev(vcs_backend)
    logger.debug(
        "make_rev_options for [{}] at url [{}] with rev [{}]/[{}]".format(
            as_info(vc_type), as_notice(str(url)),
            as_notice(str(rev)), as_notice(str(local_remote))))
    return make_rev_options(vc_type, vcs_backend, url, rev, local_remote)

def run(*args, **kwargs):
    from inspect import getframeinfo, stack
    caller = getframeinfo(stack()[1][0])
    sconstruct_path = caller.filename

    import traceback
    from cuppa.log import logger, initialise_logging
    from cuppa.colourise import as_info
    import logging

    initialise_logging()

    try:
        import cuppa.core
        cuppa.core.run(sconstruct_path, *args, **kwargs)
    except Exception as error:
        logger.error("Cuppa terminated by exception [{}: {}]".format(
            as_info(error.__class__.__name__), as_info(str(error))))
        if not logger.isEnabledFor(logging.EXCEPTION):
            logger.error("Use {} (or above) to see the stack".format(
                as_info("--verbosity=exception")))
        logger.exception(traceback.format_exc())

def extract( cls, filename, target_dir ):
    os.makedirs( target_dir )

    if tarfile.is_tarfile( filename ):
        logger.debug( "Extracting [{}] into [{}]".format( as_info( filename ), as_info( target_dir ) ) )
        try:
            # tarfile.TarFile() does not transparently decompress, so fall back
            # to the system tar if the archive cannot be read directly
            with tarfile.TarFile( filename ) as tf:
                tf.extractall( target_dir )
        except tarfile.ReadError:
            command = "tar -xf {filename}".format( filename=filename )
            if subprocess.call( shlex.split( command ), cwd=target_dir ) != 0:
                raise LocationException( "Could not untar downloaded file from [{}]".format( filename ) )

    if zipfile.is_zipfile( filename ):
        logger.debug( "Extracting [{}] into [{}]".format( as_info( filename ), as_info( target_dir ) ) )
        with zipfile.ZipFile( filename ) as zf:
            zf.extractall( target_dir )

    while cls.remove_common_top_directory_under( target_dir ):
        pass

def extract( self, filename, target_dir ):
    os.makedirs( target_dir )

    if tarfile.is_tarfile( filename ):
        logger.debug( "Extracting [{}] into [{}]".format( as_info( filename ), as_info( target_dir ) ) )
        try:
            # tarfile.TarFile() does not transparently decompress, so fall back
            # to the system tar if the archive cannot be read directly
            with tarfile.TarFile( filename ) as tf:
                tf.extractall( target_dir )
        except tarfile.ReadError:
            command = "tar -xf {filename}".format( filename=filename )
            if subprocess.call( shlex.split( command ), cwd=target_dir ) != 0:
                raise LocationException( "Could not untar downloaded file from [{}]".format( filename ) )

    if zipfile.is_zipfile( filename ):
        logger.debug( "Extracting [{}] into [{}]".format( as_info( filename ), as_info( target_dir ) ) )
        with zipfile.ZipFile( filename ) as zf:
            zf.extractall( target_dir )

    while self.remove_common_top_directory_under( target_dir ):
        pass

def _read(cls, json_report_path, default={}):
    with open(json_report_path, "r") as report:
        try:
            report = json.load(report)
            return report
        except ValueError as error:
            logger.error(
                "Test Report [{}] does not contain valid JSON. Error [{}] encountered while parsing".format(
                    as_info(json_report_path), as_error(str(error))))
            return default

def _determine_latest_boost_verion( offline ):
    current_release = "1.69.0"
    if not offline:
        try:
            boost_version_url = 'https://www.boost.org/users/download/'
            logger.info( "Checking current boost version from {}...".format( as_info( boost_version_url ) ) )
            html = lxml.html.parse( urllib2.urlopen( boost_version_url ) )
            current_release = html.xpath("/html/body/div[2]/div/div[1]/div/div/div[2]/h3[1]/span")[0].text
            current_release = str( re.search( r'(\d[.]\d+([.]\d+)?)', current_release ).group(1) )
            logger.info( "Latest boost release detected as [{}]".format( as_info( current_release ) ) )
        except Exception as e:
            logger.warn( "Cannot determine latest version of boost - [{}]. Assuming [{}].".format( str(e), current_release ) )
    else:
        logger.info( "In offline mode. No version of boost specified so assuming [{}]".format( as_info( current_release ) ) )
    return current_release

def get_branch(cls, path):
    branch = None
    remote = None

    # In case we have a detached head we use this
    result = cls.execute_command(
        "{git} show -s --pretty=\%d --decorate=full HEAD".format(git=cls.binary()), path)
    match = re.search(r'HEAD(?:(?:[^ ]* -> |[^,]*, )(?P<refs>[^)]+))?', result)

    if match:
        refs = [{"ref": r.strip(), "type": ""} for r in match.group("refs").split(',')]
        logger.trace("Refs (using show) for [{}] are [{}]".format(
            as_notice(path), colour_items((r["ref"] for r in refs))))
        if refs:
            for ref in refs:
                if ref["ref"].startswith("refs/heads/"):
                    ref["ref"] = ref["ref"][len("refs/heads/"):]
                    ref["type"] = "L"
                elif ref["ref"].startswith("refs/tags/"):
                    ref["ref"] = ref["ref"][len("refs/tags/"):]
                    ref["type"] = "T"
                elif ref["ref"].startswith("refs/remotes/"):
                    ref["ref"] = ref["ref"][len("refs/remotes/"):]
                    ref["type"] = "R"
                else:
                    ref["type"] = "U"

            logger.trace("Refs (after classification) for [{}] are [{}]".format(
                as_notice(path),
                colour_items((":".join([r["type"], r["ref"]]) for r in refs))))

            if refs[0]["type"] == "L":
                branch = refs[0]["ref"]
            elif refs[0]["type"] == "T":
                branch = refs[0]["ref"]
            elif refs[0]["type"] == "R":
                branch = refs[0]["ref"].split('/')[1]

            remote = next((ref["ref"] for ref in refs if ref["type"] == "R"), None)

        logger.trace("Branch (using show) for [{}] is [{}]".format(
            as_notice(path), as_info(str(branch))))
    else:
        logger.warn("No branch found from [{}]".format(result))

    return branch, remote

def update_from_repository( self, location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend ):

    url, repository, branch, remote, revision = self.get_info( location, local_dir_with_sub_dir, full_url, vc_type )
    rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
    version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]

    logger.info( "Updating [{}] in [{}]{} at [{}]".format(
            as_info( location ),
            as_notice( local_dir_with_sub_dir ),
            ( rev_options and " on {}".format( as_notice( str(rev_options) ) ) or "" ),
            as_info( version ) ) )

    try:
        update( vcs_backend, local_dir_with_sub_dir, rev_options )
        logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
    except pip_exceptions.PipError as error:
        logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                as_warning( location ),
                as_warning( local_dir_with_sub_dir ),
                ( rev_options and " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                as_warning( str(error) ) ) )

def get_rev_options( self, vc_type, vcs_backend, local_remote=None ):
    url, rev = get_url_rev( vcs_backend )
    logger.debug( "make_rev_options for [{}] at url [{}] with rev [{}]/[{}]".format(
            as_info( vc_type ),
            as_notice( str(url) ),
            as_notice( str(rev) ),
            as_notice( str(local_remote) ) ) )
    return make_rev_options( vc_type, vcs_backend, url, rev, local_remote )

def apply_patch_if_needed(home):
    patch_applied_path = os.path.join(home, "cuppa_test_patch_applied.txt")
    diff_file = "boost_test_patch.diff"

    if os.path.exists(patch_applied_path):
        logger.debug("[{}] already applied".format(as_info(diff_file)))
        return

    diff_path = os.path.join(os.path.split(__file__)[0], "boost", diff_file)
    command = "patch --batch -p1 --input={}".format(diff_path)
    logger.info("Applying [{}] using [{}] in [{}]".format(
        as_info(diff_file), as_info(command), as_info(home)))

    if subprocess.call(shlex.split(command), cwd=home) != 0:
        logger.error("Could not apply [{}]".format(diff_file))

    # Note: the marker file is written even when the patch fails, so a failed
    # patch will not be retried on the next run
    with open(patch_applied_path, "w") as patch_applied_file:
        pass

def _read( cls, json_report_path, default={} ):
    with open( json_report_path, "r" ) as report:
        try:
            report = json.load( report )
            return report
        except ValueError as error:
            logger.error( "Test Report [{}] does not contain valid JSON. Error [{}] encountered while parsing".format(
                    as_info( json_report_path ),
                    as_error( str(error) ) ) )
            return default

def _get_location( cls, env ):
    import SCons.Errors

    location_id = cls.location_id( env )
    if not location_id:
        return None

    if location_id not in cls._cached_locations:
        location    = location_id[0]
        develop     = location_id[1]
        branch      = location_id[2]
        use_develop = location_id[3]
        try:
            cls._cached_locations[location_id] = cuppa.location.Location( env, location, develop=develop, branch=branch, extra_sub_path=cls._extra_sub_path )
            logger.debug( "Adding location [{}]({}) to cached locations".format(
                    as_notice( cls._name.title() ),
                    as_notice( str(location_id) ) ) )
        except cuppa.location.LocationException as error:
            logger.error( "Could not get location for [{}] at [{}] (and develop [{}], use=[{}]) with branch [{}] and extra sub path [{}]. Failed with error [{}]".format(
                    as_notice( cls._name.title() ),
                    as_info( str(location) ),
                    as_info( str(develop) ),
                    as_notice( str(use_develop and True or False) ),
                    as_notice( str(branch) ),
                    as_notice( str(cls._extra_sub_path) ),
                    as_error( str(error) ) ) )
            raise SCons.Errors.StopError( error )
    else:
        logger.debug( "Loading location [{}]({}) from cached locations".format(
                as_notice( cls._name.title() ),
                as_notice( str(location_id) ) ) )

    return cls._cached_locations[location_id]

def determine_latest_boost_verion():
    current_release = "1.61.0"
    try:
        html = lxml.html.parse('http://www.boost.org/users/download/')
        current_release = html.xpath("/html/body/div[2]/div/div[1]/div/div/div[2]/h3[1]/span")[0].text
        current_release = str( re.search( r'(\d[.]\d+([.]\d+)?)', current_release ).group(1) )
        logger.debug( "latest boost release detected as [{}]".format( as_info( current_release ) ) )
    except Exception as e:
        logger.warn( "cannot determine latest version of boost - [{}]. Assuming [{}].".format( str(e), current_release ) )
    return current_release

def add_to_env( cls, env, add_toolchain, add_to_supported ):
    stdlib = None
    suppress_debug_for_auto = None
    try:
        stdlib = env.get_option( 'clang-stdlib' )
        suppress_debug_for_auto = env.get_option( 'clang-disable-debug-for-auto' )
    except Exception:
        pass

    for version in cls.supported_versions():
        add_to_supported( version )

    for version, clang in cls.available_versions().iteritems():
        logger.debug( "Adding toolchain [{}] reported as [{}] with cxx_version [clang++{}] at [{}]".format(
                as_info(version), as_info(clang['version']), as_info(clang['cxx_version']), as_notice(clang['path']) ) )
        add_toolchain( version, cls( version, clang['cxx_version'], clang['version'], clang['path'], stdlib, suppress_debug_for_auto ) )

def determine_latest_boost_verion( env ):
    current_release = "1.58.0"
    try:
        html = lxml.html.parse('http://www.boost.org/users/download/')
        current_release = html.xpath("/html/body/div[2]/div/div[1]/div/div/div[2]/h3[1]/span")[0].text
        current_release = str( re.search( r'(\d[.]\d+([.]\d+)?)', current_release ).group(1) )
        print "cuppa: boost: latest boost release detected as [{}]".format( as_info( env, current_release ) )
    except Exception as e:
        print as_warning( env, "cuppa: boost: warning: cannot determine latest version of boost - [{}]. Assuming [{}].".format( str(e), current_release ) )
    return current_release

def apply_patch_if_needed( home, version_string ):
    patch_applied_path = os.path.join( home, "cuppa_test_patch_applied.txt" )
    expected_diff_file = os.path.join( os.path.split( __file__ )[0], "boost_test_patch_{}.diff".format( version_string ) )
    available_diff_files = sorted( glob.glob( os.path.join( os.path.split( __file__ )[0], "boost_test_patch_*.diff" ) ), reverse=True )

    for diff_file in available_diff_files:
        if diff_file <= expected_diff_file:
            break

    logger.debug( "Using diff file [{}]".format( as_info( diff_file ) ) )

    if os.path.exists( patch_applied_path ):
        logger.debug( "[{}] already applied".format( as_info( diff_file ) ) )
        return

    command = "patch --batch -p1 --input={}".format( diff_file )
    logger.info( "Applying [{}] using [{}] in [{}]".format( as_info( diff_file ), as_info( command ), as_info( home ) ) )

    if subprocess.call( shlex.split( command ), cwd=home ) != 0:
        logger.error( "Could not apply [{}]".format( diff_file ) )
    else:
        with open( patch_applied_path, "w" ) as patch_applied_file:
            pass

def apply_patch_if_needed( cls, home ):
    patch_applied_path = os.path.join( home, "cuppa_test_patch_applied.txt" )
    diff_file = "boost_test_patch.diff"

    if os.path.exists( patch_applied_path ):
        logger.debug( "[{}] already applied".format( as_info( diff_file ) ) )
        return

    diff_path = os.path.join( os.path.split( __file__ )[0], "boost", diff_file )
    command = "patch --batch -p1 --input={}".format( diff_path )
    logger.info( "Applying [{}] using [{}] in [{}]".format( as_info( diff_file ), as_info( command ), as_info( home ) ) )

    if subprocess.call( shlex.split( command ), cwd=home ) != 0:
        logger.error( "Could not apply [{}]".format( diff_file ) )

    with open( patch_applied_path, "w" ) as patch_applied_file:
        pass

def _set_qt4_dir( self, env ):
    command = "pkg-config --cflags QtCore"
    try:
        cflags = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip()
        if cflags:
            flags = env.ParseFlags( cflags )
            if 'CPPPATH' in flags:
                shortest_path = flags['CPPPATH'][0]
                for include in flags['CPPPATH']:
                    if len(include) < len(shortest_path):
                        shortest_path = include
                env['QT4DIR'] = shortest_path
                logger.debug( "QT4DIR detected as [{}]".format( as_info( env['QT4DIR'] ) ) )
    except Exception:
        logger.debug( "In _set_qt4_dir() failed to execute [{}]".format( command ) )

def apply_patch_if_needed( self, env, home ):
    patch_applied_path = os.path.join( home, "cuppa_test_patch_applied.txt" )
    diff_file = "boost_test_patch.diff"

    if os.path.exists( patch_applied_path ):
        print "cuppa: boost: [{}] already applied".format( as_info( env, diff_file ) )
        return

    diff_path = os.path.join( os.path.split( __file__ )[0], "boost", diff_file )
    command = "patch --batch -p1 --input={}".format( diff_path )
    print "cuppa: boost: info: Applying [{}] using [{}] in [{}]".format( as_info( env, diff_file ), as_info( env, command ), as_info( env, home ) )

    if subprocess.call( shlex.split( command ), cwd=home ) != 0:
        print as_error( env, "cuppa: boost: error: Could not apply [{}]".format( diff_file ) )

    with open( patch_applied_path, "w" ) as patch_applied_file:
        pass

def check_current_version( offline ):
    installed_version = get_version()
    logger.info( "cuppa: version {}".format( as_info( installed_version ) ) )
    if not offline:
        try:
            pypi = xmlrpclib.ServerProxy( 'http://pypi.python.org/pypi' )
            latest_available = pypi.package_releases( 'cuppa' )[0]
            if parse_version( installed_version ) < parse_version( latest_available ):
                logger.warn( "Newer version [{}] available. Upgrade using \"{}\"\n".format(
                        as_warning( latest_available ),
                        as_emphasised( "pip install -U cuppa" ) ) )
        except Exception:
            pass

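# --- Illustrative sketch (not in the original): parse_version() (from
# pkg_resources/setuptools) is used above because plain string comparison
# mis-orders multi-digit version components:
from pkg_resources import parse_version

assert "1.0.10" < "1.0.9"                                # lexicographic: wrong
assert parse_version("1.0.10") > parse_version("1.0.9")  # semantic: right
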
def _set_qt5_dir( self, env ):
    command = "pkg-config --cflags Qt5Core"
    try:
        includes = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip().split()
        if includes:
            shortest_path = includes[0]
            for include in includes:
                if len(include) < len(shortest_path):
                    shortest_path = include
            qt5dir = env.ParseFlags( shortest_path )['CPPPATH'][0]
            env['QT5DIR'] = qt5dir
            print "cuppa: qt5: QT5DIR detected as [{}]".format( as_info( env, env['QT5DIR'] ) )
    except Exception:
        #TODO: Warning?
        pass

def lazy_update_library_list( env, emitting, libraries, built_libraries, add_dependents, linktype, boost, stage_dir ):

    if add_dependents:
        if not emitting:
            libraries = set( build_with_library_name(l) for l in add_dependent_libraries( boost, linktype, libraries ) )
        else:
            libraries = add_dependent_libraries( boost, linktype, libraries )

    if stage_dir not in built_libraries:
        logger.trace( "Lazy update libraries list for [{}] to [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        built_libraries[ stage_dir ] = set( libraries )
    else:
        logger.trace( "Lazy read libraries list for [{}]: libraries are [{}]".format( as_info(stage_dir), colour_items(str(l) for l in libraries) ) )
        libraries = [ l for l in libraries if l not in built_libraries[ stage_dir ] ]

    return libraries

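# --- Illustrative sketch (not in the original): the variant above relies on a
# module-level build_with_library_name() that is not shown here. A minimal
# definition matching the inline helper in the other variants would be:
def build_with_library_name( library ):
    # 'log_setup' is built as part of the 'log' library
    return library == 'log_setup' and 'log' or library
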