def write( self, project ):
    project_file = self._projects[project]['project_file']
    directory    = self._projects[project]['directory']

    logger.debug( "Write [{}] for [{}]".format(
            as_notice( self._projects[project]['project_file'] ),
            as_notice( project )
    ) )

    if directory and not os.path.exists( directory ):
        os.makedirs( directory )

    lines = []
    lines += self._projects[project]['lines_header']

    for target in sorted( self._projects[project]['targets'].itervalues() ):
        lines += target

    lines += [ '\t\t</Build>' ]

    for filepath in sorted( self._projects[project]['files'] ):
        lines += [ '\t\t<Unit filename="' + filepath + '" />' ]

    lines += self._projects[project]['lines_footer']

    with open( project_file, "w" ) as cbs_file:
        cbs_file.write( "\n".join( lines ) )
def __call__( self, target, source, env ):

    logger.trace( "target = [{}]".format( colour_items( [ str(node) for node in target ] ) ) )
    logger.trace( "source = [{}]".format( colour_items( [ str(node) for node in source ] ) ) )

    for html_report_src_tgt, json_report_src_tgt in zip( *[iter( itertools.izip( source, target ) )]*2 ):

        html_report = html_report_src_tgt[0]
        json_report = json_report_src_tgt[0]

        html_target = html_report_src_tgt[1]
        json_target = json_report_src_tgt[1]

        logger.trace( "html_report = [{}]".format( as_notice( str(html_report) ) ) )
        logger.trace( "json_report = [{}]".format( as_info( str(json_report) ) ) )
        logger.trace( "html_target = [{}]".format( as_notice( str(html_target) ) ) )
        logger.trace( "json_target = [{}]".format( as_info( str(json_target) ) ) )

        destination = env['abs_final_dir']
        if self._destination:
            destination = self._destination + destination_subdir( env )

        logger.trace( "report_summary = {}".format( str( self._read( str(json_report) ) ) ) )

        env.Execute( Copy( html_target, html_report ) )
        env.Execute( Copy( json_target, json_report ) )

    return None
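# A minimal standalone sketch (with hypothetical file names) of the pairing
# idiom used above: itertools.izip() interleaves sources with their targets,
# and zip( *[iter(...)]*2 ) then consumes that stream two tuples at a time,
# so each loop iteration receives one (html, json) pair of (report, target)
# tuples.
import itertools

source = [ "a.html", "a.json", "b.html", "b.json" ]
target = [ "out/a.html", "out/a.json", "out/b.html", "out/b.json" ]

for html_src_tgt, json_src_tgt in zip( *[iter( itertools.izip( source, target ) )]*2 ):
    print html_src_tgt, json_src_tgt
# ('a.html', 'out/a.html') ('a.json', 'out/a.json')
# ('b.html', 'out/b.html') ('b.json', 'out/b.json')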
def load( self ):

    self._show      = self._env.get_option( 'show_conf' )
    self._save      = self._env.get_option( 'save_conf' )
    self._remove    = self._env.get_option( 'remove_settings' )
    self._update    = self._env.get_option( 'update_conf' )
    self._clear     = self._env.get_option( 'clear_conf' )
    self._configure = self._save or self._remove or self._update
    self._clean     = self._env.get_option( 'clean' )

    self._unconfigure = ( self._save and self._clean ) or self._clear

    if self._unconfigure:
        self._configure = False
        logger.info( "{}".format( as_notice( "Clear configuration requested..." ) ) )
        if os.path.exists( self._conf_path ):
            logger.info( "Removing configure file [{}]".format( as_info( self._conf_path ) ) )
            os.remove( self._conf_path )
        else:
            logger.info( "Configure file [{}] does not exist. Unconfigure not needed".format( as_info( self._conf_path ) ) )
        return
    elif self._configure:
        print
        logger.info( "{}".format( as_notice( "Update configuration requested..." ) ) )

    if not self._save:
        self._loaded_options = self._load_conf()
    else:
        self._loaded_options = {}

    self._env['configured_options'] = self._loaded_options
    self._env['default_options'].update( self._loaded_options )
def __call__( self, target, source, env ):
    from SCons.Script import Flatten

    logger.debug( "reading template file [{}]".format( as_notice( str(source[0]) ) ) )
    with open( str(Flatten(source)[0]), 'r' ) as template_file:
        logger.debug( "open target file [{}]".format( as_notice( str(target[0]) ) ) )
        with open( str(target[0]), 'w' ) as expanded_file:
            logger.debug( "expand variables matching [{}]".format( as_notice( str(self._kwargs) ) ) )
            expanded_file.write( template_file.read().format( **self._kwargs ) )
    return None
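# Minimal standalone illustration of the expansion step above: the template
# file's contents are treated as a str.format() template and the builder's
# stored keyword arguments fill the placeholders. Values here are
# hypothetical.
template = "project = {project}\nversion = {version}\n"
print template.format( project="cuppa-demo", version="1.0.0" )
# project = cuppa-demo
# version = 1.0.0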
def get_rev_options( self, vc_type, vcs_backend, local_remote=None ):
    url, rev = get_url_rev( vcs_backend )
    logger.debug( "make_rev_options for [{}] at url [{}] with rev [{}]/[{}]".format(
            as_info( vc_type ),
            as_notice( str(url) ),
            as_notice( str(rev) ),
            as_notice( str(local_remote) )
    ) )
    return make_rev_options( vc_type, vcs_backend, url, rev, local_remote )
def __call__( self, node ):
    file_path = str(node)

    for excluded in self._excluded_paths:
        if file_path.startswith( excluded ):
            return

    path, ext = os.path.splitext( file_path )
    if ext and ext in self._ignored_types:
        return

    for allowed in self._allowed_paths:
        prefix = os.path.commonprefix( [ os.path.abspath( file_path ), allowed ] )
        logger.trace( "str(file)=[{}], file.path=[{}], allowed=[{}], prefix=[{}]".format(
                as_notice( str(node) ), as_notice( node.path ),
                as_notice( str(allowed) ), as_notice( str(prefix) )
        ) )
        if prefix != allowed:
            return

    logger.trace( "str(file)=[{}], file.path=[{}], allowed=[{}], prefix=[{}]".format(
            as_notice( str(node) ), as_notice( node.path ),
            as_notice( str(allowed) ), as_notice( str(prefix) )
    ) )

    file_path = os.path.relpath( os.path.abspath( file_path ), self._base_path )
    self._files.add( file_path )
    return
def __call__( self, env, source, **kwargs ):

    sources = Flatten( [ source ] )
    objects = []

    if 'CPPPATH' in env:
        env.AppendUnique( INCPATH = env['CPPPATH'] )

    if self._shared:
        obj_prefix  = env.subst('$SHOBJPREFIX')
        obj_suffix  = env.subst('$SHOBJSUFFIX')
        obj_builder = env.SharedObject
    else:
        obj_prefix  = env.subst('$OBJPREFIX')
        obj_suffix  = env.subst('$OBJSUFFIX')
        obj_builder = env.Object

    logger.trace( "Build Root = [{}]".format( as_notice( env['build_root'] ) ) )

    for source in sources:
        if not isinstance( source, Node ):
            source = env.File( source )

        logger.trace( "Object source = [{}]/[{}]".format( as_notice(str(source)), as_notice(source.path) ) )

        if os.path.splitext(str(source))[1] == obj_suffix:
            objects.append( source )
        else:
            target = os.path.splitext( os.path.split( str(source) )[1] )[0]

            if not source.path.startswith( env['build_root'] ):
                if os.path.isabs( str(source) ):
                    target = env.File( os.path.join( obj_prefix + target + obj_suffix ) )
                else:
                    target = env.File( os.path.join( env['build_dir'], obj_prefix + target + obj_suffix ) )
            else:
                offset_dir = os.path.relpath( os.path.split( source.path )[0], env['build_dir'] )
                target = env.File( os.path.join( offset_dir, obj_prefix + target + obj_suffix ) )

            logger.trace( "Object target = [{}]/[{}]".format( as_notice(str(target)), as_notice(target.path) ) )

            objects.append( obj_builder(
                    target  = target,
                    source  = source,
                    CPPPATH = env['SYSINCPATH'] + env['INCPATH'],
                    **kwargs
            ) )

    cuppa.progress.NotifyProgress.add( env, objects )
    return objects
def __init__( self, env, include_thirdparty, exclude_branches, excluded_paths_starting, place_cbs_by_sconscript ):

    self._include_thirdparty      = include_thirdparty
    self._exclude_branches        = exclude_branches
    self._excluded_paths_starting = excluded_paths_starting and excluded_paths_starting or []
    self._place_cbs_by_sconscript = place_cbs_by_sconscript

    self._projects = {}

    base_include = self._exclude_branches and env['base_path'] or env['branch_root']

    base       = os.path.realpath( base_include )
    download   = os.path.realpath( env['download_root'] )
    thirdparty = env['thirdparty'] and os.path.realpath( env['thirdparty'] ) or None

    common, tail1, tail2 = cuppa.path.split_common( base, download )
    download_under_base = common and not tail1

    thirdparty_under_base = None
    if thirdparty:
        common, tail1, tail2 = cuppa.path.split_common( base, thirdparty )
        thirdparty_under_base = common and not tail1

    self._exclude_paths = self._excluded_paths_starting
    self._build_root = [ env['build_root'] ]

    if not self._include_thirdparty:
        if download_under_base:
            self._exclude_paths.append( env['download_root'] )
        if thirdparty and thirdparty_under_base:
            self._exclude_paths.append( env['thirdparty'] )

    self._include_paths = [ base_include ]

    if self._include_thirdparty:
        if not download_under_base:
            self._include_paths.append( env['download_root'] )
        if thirdparty and not thirdparty_under_base:
            self._include_paths.append( env['thirdparty'] )

    self._ignored_types = ignored_types( env )

    cuppa.progress.NotifyProgress.register_callback( None, self.on_progress )

    logger.debug( "Including Paths Under    = {}".format( as_notice( str( self._include_paths ) ) ) )
    logger.debug( "Excluding Paths Starting = {}".format( as_notice( str( self._exclude_paths ) ) ) )
def relative_start( env, start, default ):
    start, base_path = clean_start( env, start, default )
    rel_start = os.path.relpath( base_path, start )
    logger.trace( "paths: start = [{}], base_path = [{}], rel_start = [{}]".format(
            as_notice( start ), as_notice( base_path ), as_notice( rel_start )
    ) )
    if not os.path.isabs( start ):
        start = rel_start
    return start, rel_start, base_path
def __init__( self, cuppa_env, location, branch=None, extra_sub_path=None, name_hint=None ):

    self._location  = location
    self._full_url  = urlparse.urlparse( location )
    self._sub_dir   = ""
    self._name_hint = name_hint

    if extra_sub_path:
        if os.path.isabs( extra_sub_path ):
            raise LocationException( "Error extra sub path [{}] is not relative".format(extra_sub_path) )
        else:
            self._sub_dir = os.path.normpath( extra_sub_path )

    ## Get the location for the source dependency. If the location is a URL or an Archive we'll need to
    ## retrieve the URL and extract the archive. get_local_directory() returns the location of the source
    ## once this is done
    local_directory, use_sub_dir = self.get_local_directory( cuppa_env, location, self._sub_dir, branch, self._full_url )

    self._base_local_directory = local_directory
    self._local_directory = use_sub_dir and os.path.join( local_directory, self._sub_dir ) or local_directory

    ## Now that we have a locally accessible version of the dependency we can try to collate some information
    ## about it to allow us to specify what we are building with.
    self._url, self._repository, self._branch, self._revision = self.get_info( self._location, self._local_directory, self._full_url )
    self._version, self._revision = self.ver_rev_summary( self._branch, self._revision, self._full_url.path )

    logger.debug( "Using [{}]{} at [{}] stored in [{}]".format(
            as_info( location ),
            ( branch and ":[{}]".format( as_info( str(branch) ) ) or "" ),
            as_info( self._version ),
            as_notice( self._local_directory )
    ) )
def name_from_dir( path ):
    if not os.path.isabs( path ):
        path = os.path.normpath( os.path.join( cuppa_env['sconstruct_dir'], path ) )
        logger.debug( "normalised path = [{}]".format( path ) )
    common, tail1, tail2 = split_common( cuppa_env['abs_sconscript_dir'], os.path.abspath( path ) )
    logger.debug( "common[{}], tail1[{}], tail2[{}]".format( as_notice( common ), as_notice( tail1 ), as_notice( tail2 ) ) )
    return tail2 and tail2 or ""
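# A standalone sketch of the split_common() behaviour assumed above: it is
# taken to return the common leading directory of two paths plus each path's
# remainder, so name_from_dir() returns tail2 - the path of the given
# directory relative to the sconscript dir. This is a minimal illustrative
# implementation, not cuppa.path.split_common itself.
import os

def split_common( path1, path2 ):
    parts1 = path1.split( os.path.sep )
    parts2 = path2.split( os.path.sep )
    common = os.path.commonprefix( [ parts1, parts2 ] )
    tail1 = os.path.sep.join( parts1[len(common):] )
    tail2 = os.path.sep.join( parts2[len(common):] )
    return os.path.sep.join( common ), tail1, tail2

print split_common( "/work/repo/src", "/work/repo/src/lib/json" )
# ('/work/repo/src', '', 'lib/json')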
def make_rev_options( vc_type, vcs_backend, url, rev, local_remote ):
    logger.debug( "vc_type={vc_type}, url={url}, rev={rev}, local_remote={local_remote}".format(
            vc_type      = as_info( str(vc_type) ),
            url          = as_notice( str(url) ),
            rev          = as_notice( str(rev) ),
            local_remote = as_notice( str(local_remote) )
    ) )
    if vc_type == 'git':
        if rev:
            return vcs_backend.make_rev_options( rev=rev )
        #elif local_remote:
            #return vcs_backend.make_rev_options( rev=local_remote )
    elif vc_type == 'hg' and rev:
        return vcs_backend.make_rev_options( rev=rev )
    elif vc_type == 'bzr' and rev:
        return vcs_backend.make_rev_options( rev=rev )
    return vcs_backend.make_rev_options()
def _get_location( cls, env ):
    location_id = cls.location_id( env )
    if not location_id:
        return None
    if location_id not in cls._cached_locations:
        location = location_id[0]
        branch   = location_id[1]
        try:
            cls._cached_locations[location_id] = cuppa.location.Location( env, location, branch )
        except cuppa.location.LocationException as error:
            logger.error( "Could not get location for [{}] at [{}] with branch [{}]. Failed with error [{}]".format(
                    as_notice( cls._name.title() ),
                    as_notice( str(location) ),
                    as_notice( str(branch) ),
                    as_error( error )
            ) )
            return None
    return cls._cached_locations[location_id]
def _add_to_test_suites( cls, test_suites, test_case ):
    logger.trace( "test_case = [{}]".format( as_notice( str(test_case) ) ) )
    suite = test_case['suite']
    if suite not in test_suites:
        test_suites[suite] = {}
        cls._initialise_test_suite( suite, test_suites[suite] )
    test_suite = test_suites[suite]
    test_suite['test_cases'].append( test_case )
    cls._update_summary_stats( test_suite, test_case )
def _remove_settings( self ):
    initial_option_count = len(self._loaded_options)
    logger.info( "Remove settings requested for the following options {}".format( self._remove ) )
    for setting in self._remove:
        if setting in self._loaded_options:
            del self._loaded_options[setting]
            logger.info( "Removing option [{}] as requested".format( as_notice( "--" + setting ) ) )
    if initial_option_count != len(self._loaded_options):
        self._update_conf()
def filter_nodes( nodes, match_patterns, exclude_patterns=[] ):
    nodes = Flatten( nodes )
    if not match_patterns and not exclude_patterns:
        return nodes

    if match_patterns:
        match_patterns = Flatten( [ match_patterns ] )
        for i, match_pattern in enumerate(match_patterns):
            if is_string( match_pattern ):
                match_patterns[i] = re.compile( fnmatch.translate( match_pattern ) )

    if exclude_patterns:
        exclude_patterns = Flatten( [ exclude_patterns ] )
        for i, exclude_pattern in enumerate(exclude_patterns):
            if is_string( exclude_pattern ):
                exclude_patterns[i] = re.compile( fnmatch.translate( exclude_pattern ) )

    filtered_nodes = []

    for node in nodes:
        path = str( node )
        logger.trace( "Node in nodes to filter = [{}][{}]".format( as_notice(path), as_notice(node.path) ) )

        if exclude_patterns:
            excluded = False
            for exclude_pattern in exclude_patterns:
                if exclude_pattern.match( path ):
                    excluded = True
                    break
            if excluded:
                continue

        if not match_patterns:
            filtered_nodes.append( node )
        else:
            for match_pattern in match_patterns:
                if match_pattern.match( path ):
                    filtered_nodes.append( node )
                    break  # stop after the first match so a node is only appended once

    return filtered_nodes
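# Hypothetical usage sketch: match_patterns and exclude_patterns are
# shell-style globs, so fnmatch.translate() turns "*.cpp" into a regex that
# is matched against each node's string form. The env.GlobFiles() helper and
# the pattern values here are illustrative assumptions.
nodes = env.GlobFiles( '*' )
sources = filter_nodes( nodes, match_patterns='*.cpp', exclude_patterns=[ '*_test.cpp' ] )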
def on_sconstruct_end( self, env ): workspace_dir = os.path.join( env['working_dir'], "cbs" ) workspace_path = os.path.join( workspace_dir, "all.workspace" ) if workspace_dir and not os.path.exists( workspace_dir ): os.makedirs( workspace_dir ) logger.debug( "Write workspace [{}]".format( as_notice( workspace_path ) ) ) with open( workspace_path, "w" ) as workspace_file: workspace_file.write( "\n".join( self.create_workspace( self._projects ) ) )
def progress_action( label, event, sconscript, variant, env ):
    progress = Progress( event, sconscript, variant, env )
    description = None
    if logger.isEnabledFor( logging.INFO ):
        stage = ""
        name  = ""
        if label.startswith("#"):
            stage = as_notice( label[1:] )
        elif not variant:
            stage = as_notice(label) + " sconscript: ["
            name  = as_notice( sconscript ) + "]"
        else:
            stage = as_notice(label) + " variant: ["
            name  = as_info( variant ) + "]"
        description = "Progress( {}{} )".format( stage, name )
    return Action( progress, description )
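# Standalone sketch of the three description forms built above, with
# hypothetical label/sconscript/variant values and colour markup omitted.
label, sconscript, variant = "Begin", "src/sconscript", None

if label.startswith( "#" ):
    description = "Progress( {} )".format( label[1:] )
elif not variant:
    description = "Progress( {} sconscript: [{}] )".format( label, sconscript )
else:
    description = "Progress( {} variant: [{}] )".format( label, variant )

print description   # Progress( Begin sconscript: [src/sconscript] )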
def on_sconstruct_end( self, env ): workspace_dir = os.path.join( env['working_dir'], "cbs" ) workspace_path = os.path.join( workspace_dir, "all.workspace" ) if workspace_dir and not os.path.exists( workspace_dir ): os.makedirs( workspace_dir ) print "cuppa: project-generator (CodeBlocks): write workspace [{}]".format( as_notice( env, workspace_path ) ) with open( workspace_path, "w" ) as workspace_file: workspace_file.write( "\n".join( self.create_workspace( self._projects ) ) )
def get_branch( cls, path ):
    branch = None
    remote = None

    # In case we have a detached head we use this
    result = cls.execute_command( "{git} show -s --pretty=\%d HEAD".format( git=cls.binary() ), path )
    match = re.search( r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result )

    if match:
        branches = [ b.strip() for b in match.group("branches").split(',') ]
        logger.trace( "Branches (using show) for [{}] are [{}]".format( as_notice(path), colour_items(branches) ) )
        if len(branches) == 1:
            # If this returns a tag: tag_name replace the ": " with "/" and then extract the tag_name
            # otherwise this will simply extract the branch_name as expected
            if not branches[0].startswith('tag:'):
                remote = branches[0]
            branch = branches[0].replace(': ','/').split('/')[1]
        else:
            remote = branches[-2]
            branch = remote.split('/')[1]
        logger.trace( "Branch (using show) for [{}] is [{}]".format( as_notice(path), as_info(branch) ) )
    else:
        logger.warn( "No branch found from [{}]".format( result ) )

    return branch, remote
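# A minimal sketch of what get_branch() parses, using hypothetical output.
# "git show -s --pretty=%d HEAD" prints the ref decoration for HEAD, e.g.
#
#    (HEAD -> master, origin/master)        # on a branch
#    (HEAD, tag: v1.2.0, origin/master)     # detached head at a tag
#
# The regex captures everything after the first comma inside the parentheses:
import re

result = " (HEAD -> master, origin/master)"
match = re.search( r'[(]HEAD[^,]*[,] (?P<branches>[^)]+)[)]', result )
if match:
    branches = [ b.strip() for b in match.group("branches").split(',') ]
    print branches   # ['origin/master'] -> remote 'origin/master', branch 'master'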
def __call__( self, target, source, env ):
    for library in self._libraries:
        filename = None
        if self._linktype == 'static':
            filename = static_library_name( env, library, self._toolchain, self._boost.version(), self._variant, self._threading )
        else:
            filename = shared_library_name( env, library, self._toolchain, self._boost.full_version(), self._variant, self._threading )

        built_library_path = os.path.join( self._location, self._stage_dir, 'lib', filename )
        logger.trace( "Emit Boost library [{}] to [{}]".format( as_notice(library), as_notice(built_library_path) ) )

        node = File( built_library_path )
        target.append( node )

    return target, source
def _get_location( cls, env ):
    import SCons.Errors
    location_id = cls.location_id( env )
    if not location_id:
        return None
    if location_id not in cls._cached_locations:
        location    = location_id[0]
        develop     = location_id[1]
        branch      = location_id[2]
        use_develop = location_id[3]
        try:
            cls._cached_locations[location_id] = cuppa.location.Location( env, location, develop=develop, branch=branch, extra_sub_path=cls._extra_sub_path )
            logger.debug( "Adding location [{}]({}) to cached locations".format(
                    as_notice( cls._name.title() ),
                    as_notice( str(location_id) )
            ) )
        except cuppa.location.LocationException as error:
            logger.error( "Could not get location for [{}] at [{}] (and develop [{}], use=[{}]) with branch [{}] and extra sub path [{}]. Failed with error [{}]".format(
                    as_notice( cls._name.title() ),
                    as_info( str(location) ),
                    as_info( str(develop) ),
                    as_notice( str(use_develop and True or False) ),
                    as_notice( str(branch) ),
                    as_notice( str(cls._extra_sub_path) ),
                    as_error( str(error) )
            ) )
            raise SCons.Errors.StopError( error )
    else:
        logger.debug( "Loading location [{}]({}) from cached locations".format(
                as_notice( cls._name.title() ),
                as_notice( str(location_id) )
        ) )
    return cls._cached_locations[location_id]
def __call__( self, env, pattern, start=default, exclude_dirs=default ):

    start, rel_start, base_path = relative_start( env, start, self.default )

    if exclude_dirs == self.default:
        exclude_dirs = [ env['download_root'], env['build_root'] ]

    exclude_dirs_regex = None
    if exclude_dirs:
        def up_dir( path ):
            element = next( e for e in path.split(os.path.sep) if e )
            return element == ".."
        exclude_dirs = [ re.escape(d) for d in exclude_dirs if not os.path.isabs(d) and not up_dir(d) ]
        exclude_dirs = "|".join( exclude_dirs )
        exclude_dirs_regex = re.compile( exclude_dirs )

    matches = cuppa.recursive_glob.glob( start, pattern, exclude_dirs_pattern=exclude_dirs_regex )

    logger.trace( "matches = [{}].".format( colour_items( [ str(match) for match in matches ] ) ) )

    make_relative = True
    if rel_start.startswith( os.pardir ):
        make_relative = False

    logger.trace( "make_relative = [{}].".format( as_notice( str(make_relative) ) ) )

    nodes = [ env.File( make_relative and os.path.relpath( match, base_path ) or match ) for match in matches ]

    logger.trace( "nodes = [{}].".format( colour_items( [ str(node) for node in nodes ] ) ) )

    return nodes
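# Standalone sketch of the up_dir() check above: it looks only at the first
# non-empty path component, so a directory that starts by walking up the
# tree is rejected as an exclude pattern, while one that merely contains
# ".." later is not.
import os

def up_dir( path ):
    element = next( e for e in path.split( os.path.sep ) if e )
    return element == ".."

print up_dir( "../foo" )      # True  - dropped from exclude_dirs
print up_dir( "foo/../bar" )  # False - kept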
def execute_command(cls, command, path=None):
    try:
        logger.trace("Executing command [{command}]...".format(
            command=as_info(command)))
        result = as_str(
            subprocess.check_output(
                shlex.split(command), stderr=subprocess.STDOUT,
                cwd=path)).strip()
        logger.trace("Result of calling [{command}] was [{result}]".format(
            command=as_info(command), result=as_notice(result)))
        return result
    except subprocess.CalledProcessError as error:
        logger.trace(
            "Command [{command}] failed with exit code [{exit_code}]".format(
                command=as_warning(str(command)),
                exit_code=as_warning(str(error.returncode))))
        raise cls.Error("Command [{command}] failed".format(
            command=str(command)))
    except OSError:
        logger.trace("Binary [{git}] is not available".format(
            git=as_warning(cls.binary())))
        raise cls.Error("Binary [{git}] is not available".format(
            git=cls.binary()))
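# Hypothetical usage: execute_command is a classmethod, so both failure
# modes (non-zero exit code and missing binary) surface as the class's own
# Error type. The enclosing class name "Git" is an assumption for the sake
# of the example.
try:
    branch = Git.execute_command("git rev-parse --abbrev-ref HEAD",
                                 path="/work/repo")
except Git.Error as error:
    logger.warn(str(error))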
def retrieve_repo_info(cls, vcs_system, vcs_directory, expected_vc_type):
    if not expected_vc_type or expected_vc_type == vcs_system.vc_type():
        try:
            logger.trace(
                "expected_vc_type=[{expected_vc_type}], vcs_system=[{vc_type}], vcs_directory=[{directory}]"
                .format(expected_vc_type=as_info(str(expected_vc_type)),
                        vc_type=as_info(
                            vcs_system and vcs_system.vc_type() or "None"),
                        directory=as_notice(str(vcs_directory))))
            info = vcs_system.info(vcs_directory)
            logger.trace("vcs_info=[{vcs_info}]".format(
                vcs_info=as_info(str(info))))
            return info
        except vcs_system.Error as ex:
            if expected_vc_type:
                logger.error(
                    "Failed to retrieve info for [{}] because [{}]".format(
                        as_error(vcs_directory), as_error(str(ex))))
                raise
    return None
def add_to_env( cls, env, add_toolchain, add_to_supported ):
    stdlib = None
    suppress_debug_for_auto = None  # initialise so a failed option lookup cannot leave this unbound
    try:
        stdlib = env.get_option( 'clang-stdlib' )
        suppress_debug_for_auto = env.get_option( 'clang-disable-debug-for-auto' )
    except:
        pass

    for version in cls.supported_versions():
        add_to_supported( version )

    for version, clang in cls.available_versions().iteritems():
        logger.debug( "Adding toolchain [{}] reported as [{}] with cxx_version [clang++{}] at [{}]".format(
                as_info(version),
                as_info(clang['version']),
                as_info(clang['cxx_version']),
                as_notice(clang['path'])
        ) )
        add_toolchain(
                version,
                cls( version, clang['cxx_version'], clang['version'], clang['path'], stdlib, suppress_debug_for_auto )
        )
def get_current_rev_info(path):
    logger.debug("Checking current revision info for [{}]...".format(
        as_info(path)))
    rev_info = None
    for scm_system in scms_systems.values():
        try:
            rev_info = scm_system.info(path)
            break
        except:
            continue
    if rev_info:
        url, repo, branch, remote, rev = (rev_info[0], rev_info[1],
                                          rev_info[2], rev_info[3],
                                          rev_info[4])
        logger.debug("Path [{path}] is under version control as"
                     " URL [{url}], Repository [{repo}], Branch [{branch}],"
                     " Remote [{remote}], Revision [{rev}]".format(
                         path=as_notice(path),
                         url=as_info(url),
                         repo=as_info(repo),
                         branch=branch and as_info(branch) or "<None>",
                         remote=remote and as_info(remote) or "<None>",
                         rev=rev and as_info(rev) or "<None>"))  # guard on rev itself, not remote
        return url, repo, branch, remote, rev
    return None, None, None, None, None
def update( self, env, project, toolchain, variant, build_root, working_dir, final_dir_offset ): logger.debug( "Update project [{}] for [{}, {}]".format( as_notice( project ), as_notice( toolchain) , as_notice( variant ) ) ) logger.trace( "Update project [{}] working_dir [{}], final_dir [{}]".format( as_notice( project ), as_notice( working_dir) , as_notice( final_dir_offset ) ) ) if project not in self._projects: title = os.path.splitext( project )[0] directory, filename = os.path.split( title ) cbs_file_name = filename if cbs_file_name in [ 'sconscript', 'SConscript', 'Sconscript' ]: cbs_file_name = os.path.split( directory )[1] if cbs_file_name == ".": cbs_file_name = os.path.split( os.path.abspath( env['sconscript_dir'] ) )[1] if not cbs_file_name: cbs_file_name = "sconscript" if not self._place_cbs_by_sconscript: directory = env['working_dir'] directory = os.path.join( directory, "cbs") project_file = directory + os.path.sep + cbs_file_name + ".cbp" execution_dir = '' if directory: execution_dir = os.path.relpath( os.getcwd(), directory ) execution_dir = ( os.path.pardir + os.path.sep + os.path.join( execution_dir, os.path.split( os.path.abspath( os.getcwd() ) )[1] ) ) self._projects[project] = {} self._projects[project]['title'] = title self._projects[project]['directory'] = directory self._projects[project]['path'] = os.path.join( os.getcwd(), directory ) self._projects[project]['execution_dir'] = execution_dir self._projects[project]['project_file'] = project_file self._projects[project]['variants'] = set() self._projects[project]['toolchains'] = set() self._projects[project]['files'] = set() self._projects[project]['targets'] = {} self._projects[project]['lines_header'] = [] self._projects[project]['lines_footer'] = [] if not self._projects[project]['lines_header']: self._projects[project]['lines_header'] = self.create_header( self._projects[project]['title'], self._projects[project]['execution_dir'] ) if not self._projects[project]['lines_footer']: self._projects[project]['lines_footer'] = self.create_footer() self._projects[project]['variants'].add( variant ) self._projects[project]['toolchains'].add( toolchain ) working_dir_path = os.path.join( self._projects[project]['execution_dir'], working_dir ) final_dir_path = os.path.normpath( os.path.join( working_dir_path, final_dir_offset ) ) target = "{}-{}".format( toolchain, variant ) test_actions = [ "", "--test" ] for action in test_actions: target_name = target + action if target_name not in self._projects[project]['targets']: self._projects[project]['targets'][target_name] = self.create_target( target_name, project, toolchain, variant, action, working_dir_path, final_dir_path )
def __call__(self, target, source, env):

    executable = str(source[0].abspath)
    working_dir, test = os.path.split(executable)
    if self._working_dir:
        working_dir = self._working_dir
    program_path = source[0].path
    suite = env['build_dir']

    if cuppa.build_platform.name() == "Windows":
        executable = '"' + executable + '"'

    test_command = executable
    if self._command:
        test_command = self._command
        working_dir = self._working_dir and self._working_dir or self._final_dir
        test = os.path.relpath(executable, working_dir)

    test_suite = TestSuite.create(suite, env)
    test_case = test_suite.enter_test(test, expected=self._expected)

    show_test_output = env['show_test_output']

    try:
        return_code = self._run_test(test_case, show_test_output,
                                     program_path, test_command, working_dir,
                                     env)

        if return_code == self._expected_exit_code:
            test_suite.exit_test(test_case, 'passed')
        elif return_code < 0:
            self.__write_file_to_stderr(stderr_file_name_from(program_path))
            logger.error("Test was terminated by signal: {}".format(
                as_error(str(return_code))))
            test_suite.exit_test(test_case, 'aborted')
        elif return_code > 0:
            self.__write_file_to_stderr(stderr_file_name_from(program_path))
            logger.error("Test returned with error code: {}".format(
                as_error(str(return_code))))
            test_suite.exit_test(test_case, 'failed')
        else:
            test_suite.exit_test(test_case, 'passed')

        cuppa.test_report.cuppa_json.write_report(
            report_file_name_from(program_path), test_suite.tests())

        if return_code == self._expected_exit_code:
            self._write_success_file(success_file_name_from(program_path))
        elif return_code:
            self._remove_success_file(success_file_name_from(program_path))
            if return_code < 0:
                raise BuildError(
                    node=source[0],
                    errstr="Test was terminated by signal: {}".format(
                        str(-return_code)))
            else:
                raise BuildError(
                    node=source[0],
                    errstr="Test returned with error code: {}".format(
                        str(return_code)))
        else:
            self._write_success_file(success_file_name_from(program_path))

        return None

    except OSError, e:
        logger.error("Execution of [{}] failed with error: {}".format(
            as_notice(test_command), as_notice(str(e))))
        raise BuildError(e)
def create( cls, env ):
    boost_id = boost_location_id( env )
    if boost_id not in cls._cached_boost_locations:
        logger.debug( "Adding boost [{}] to env".format( as_notice( str(boost_id) ) ) )
        cls._cached_boost_locations[ boost_id ] = get_boost_location( env, boost_id[0], boost_id[1], boost_id[2], boost_id[3] )
    location = cls._cached_boost_locations[ boost_id ]

    boost = None
    try:
        boost = cls( env, env[ 'platform' ], location )
    except BoostException as e:
        logger.error( "Could not create boost dependency - {}".format(e) )
        return None

    if not boost:
        logger.error( "Could not create boost dependency" )
        return None

    build_always   = env.get_option( 'boost-build-always' )
    verbose_build  = env.get_option( 'boost-verbose-build' )
    verbose_config = env.get_option( 'boost-verbose-config' )

    env.AddMethod( BoostStaticLibraryMethod( add_dependents=False, build_always=build_always, verbose_build=verbose_build, verbose_config=verbose_config ), "BoostStaticLibrary" )
    env.AddMethod( BoostSharedLibraryMethod( add_dependents=False, build_always=build_always, verbose_build=verbose_build, verbose_config=verbose_config ), "BoostSharedLibrary" )
    env.AddMethod( BoostStaticLibraryMethod( add_dependents=False, build_always=build_always, verbose_build=verbose_build, verbose_config=verbose_config ), "BoostStaticLib" )
    env.AddMethod( BoostSharedLibraryMethod( add_dependents=False, build_always=build_always, verbose_build=verbose_build, verbose_config=verbose_config ), "BoostSharedLib" )
    env.AddMethod( BoostStaticLibraryMethod( add_dependents=True,  build_always=build_always, verbose_build=verbose_build, verbose_config=verbose_config ), "BoostStaticLibs" )
    env.AddMethod( BoostSharedLibraryMethod( add_dependents=True,  build_always=build_always, verbose_build=verbose_build, verbose_config=verbose_config ), "BoostSharedLibs" )

    return boost
def __init__( self, cuppa_env, location, develop=None, branch_path=None, extra_sub_path=None, name_hint=None ):

    logger.debug( "Create location using location=[{}], develop=[{}], branch_path=[{}], extra_sub_path=[{}], name_hint=[{}]".format(
            as_info( location ),
            as_info( str(develop) ),
            as_info( str(branch_path) ),
            as_info( str(extra_sub_path) ),
            as_info( str(name_hint) )
    ) )

    self._cuppa_env = cuppa_env
    self._supports_relative_versioning = False
    self._current_branch   = self._cuppa_env['current_branch']
    self._current_revision = self._cuppa_env['current_revision']

    self._offline = self.option_set('offline')
    offline = self._offline
    self._default_branch = self._cuppa_env['location_default_branch']

    location = self.replace_sconstruct_anchor( location )

    if develop:
        if not os.path.isabs( develop ):
            develop = '#' + develop
        develop = self.replace_sconstruct_anchor( develop )
        logger.debug( "Develop location specified [{}]".format( as_info( develop ) ) )

    if self.option_set('develop') and develop:
        location = develop
        logger.debug( "--develop specified so using location=develop=[{}]".format( as_info( develop ) ) )

    scm_location = location

    if location[-1] == '@':
        self._supports_relative_versioning = True
        scm_location = location[:-1]

    scm_system, vc_type, repo_location, versioning = self.get_scm_system_and_info( self.expand_secret( scm_location ) )

    logger.debug( "Local location and actions for [{location}] being determined in context:{offline}"
                  " vc_type=[{vc_type}], repo_location=[{repo_location}],"
                  " versioning=[{versioning}]".format(
            location      = as_info(location),
            offline       = self._offline and " " + as_info_label("OFFLINE") + "," or "",
            vc_type       = as_info(str(vc_type)),
            repo_location = as_info(str(repo_location)),
            versioning    = as_info(str(versioning))
    ) )

    if self._supports_relative_versioning:
        if self.location_match_current_branch():
            if not scm_system:
                logger.warn( "Location [{}] specified using relative versioning, but no SCM system is available"
                             " that matches the version control type [{}]. Relative versioning will be ignored"
                             " for this location.".format( location, vc_type ) )
            else:
                branch_exists = False

                logger.debug( "Relative branching active for [{location}] with"
                              " current branch [{branch}] and current revision [{revision}]".format(
                        location = as_info(str(location)),
                        branch   = as_info(str(self._current_branch)),
                        revision = as_info(str(self._current_revision))
                ) )

                if self._current_branch:
                    # Try to checkout on the explicit branch but if that fails fall back
                    # to the default by stripping off the '@' from the end of the path
                    if not offline and scm_system.remote_branch_exists( repo_location, self._current_branch ):
                        scm_location = location + self._current_branch
                        logger.trace( "scm_location = [{scm_location}]".format( scm_location=as_info(str(scm_location)) ) )
                elif self._current_revision:
                    # Try to checkout at the explicit revision but if that fails fall back
                    # to the default by stripping off the '@' from the end of the path
                    if not offline and scm_system.remote_branch_exists( repo_location, self._current_revision ):
                        scm_location = location + self._current_revision
                        logger.trace( "scm_location = [{scm_location}]".format( scm_location=as_info(str(scm_location)) ) )
                elif scm_system and not offline:
                    self._default_branch = scm_system.remote_default_branch( repo_location )
                    if self._default_branch:
                        scm_location = location + self._default_branch
    elif( scm_system
          and not versioning
          and not offline
          and self.option_set('location_explicit_default_branch') ):
        self._default_branch = scm_system.remote_default_branch( repo_location )
        if self._default_branch:
            scm_location = location + '@' + self._default_branch

    location = scm_location

    self._location  = os.path.expanduser( location )
    self._full_url  = urlparse( self._location )
    self._sub_dir   = None
    self._name_hint = name_hint

    if extra_sub_path:
        if os.path.isabs( extra_sub_path ):
            raise LocationException( "Error extra sub path [{}] is not relative".format(extra_sub_path) )
        else:
            self._sub_dir = os.path.normpath( extra_sub_path )

    ## Get the location for the source dependency. If the location is a URL or an Archive we'll need to
    ## retrieve the URL and extract the archive. get_local_directory() returns the location of the source
    ## once this is done
    local_directory = self.get_local_directory( self._location, self._sub_dir, branch_path, self._full_url )

    logger.trace( "Local Directory for [{}] returned as [{}]".format(
            as_notice( self._location ),
            as_notice( local_directory )
    ) )

    self._base_local_directory = local_directory
    self._local_directory = self._sub_dir and os.path.join( local_directory, self._sub_dir ) or local_directory

    ## Now that we have a locally accessible version of the dependency we can try to collate some information
    ## about it to allow us to specify what we are building with.
    self._url, self._repository, self._branch, self._remote, self._revision = self.get_info( self._location, self._local_directory, self._full_url )
    self._version, self._revision = self.ver_rev_summary( self._branch, self._revision, self._full_url.path )

    logger.debug( "Using [{}]{}{} at [{}] stored in [{}]".format(
            as_info( location ),
            ( self._branch and ":[{}]".format( as_info( str(self._branch) ) ) or "" ),
            ( self._remote and " from [{}]".format( as_info( str(self._remote) ) ) or "" ),
            as_info( self._version ),
            as_notice( self._local_directory )
    ) )
def call_project_sconscript_files(self, toolchain, variant, target_arch, abi,
                                  sconscript_env, project):

    sconscript_file = project

    if os.path.exists(sconscript_file) and os.path.isfile(sconscript_file):

        logger.debug(
            "project exists and added to build [{}] using [{},{},{}]".format(
                as_notice(sconscript_file), as_notice(toolchain.name()),
                as_notice(variant), as_notice(target_arch)))

        path_without_ext = os.path.splitext(sconscript_file)[0]
        sconstruct_offset_path, sconscript_name = os.path.split(sconscript_file)

        name = os.path.splitext(sconscript_name)[0]
        sconscript_env['sconscript_name_id'] = name
        if name.lower() == "sconscript":
            sconscript_env['sconscript_name_id'] = ""
            path_without_ext = sconstruct_offset_path
            name = path_without_ext

        sconscript_env['sconscript_file'] = sconscript_file

        build_root = sconscript_env['build_root']
        working_folder = 'working'

        sconscript_env = sconscript_env.Clone()
        sconscript_env['sconscript_env'] = sconscript_env

        sconscript_env['sconscript_build_dir'] = path_without_ext
        sconscript_env['sconscript_toolchain_build_dir'] = os.path.join(
            path_without_ext, toolchain.name())
        sconscript_env['sconscript_dir'] = os.path.join(
            sconscript_env['base_path'], sconstruct_offset_path)
        sconscript_env['abs_sconscript_dir'] = os.path.abspath(
            sconscript_env['sconscript_dir'])
        sconscript_env['tool_variant_dir'] = os.path.join(
            toolchain.name(), variant, target_arch, abi)
        sconscript_env['tool_variant_working_dir'] = os.path.join(
            sconscript_env['tool_variant_dir'], working_folder)

        build_base_path = os.path.join(path_without_ext,
                                       sconscript_env['tool_variant_dir'])

        def flatten_dir(directory, join_char="_"):
            return join_char.join(
                os.path.normpath(directory).split(os.path.sep))

        sconscript_env['build_base_path'] = build_base_path
        sconscript_env['flat_build_base'] = flatten_dir(build_base_path)

        sconscript_env['tool_variant_build_dir'] = os.path.join(
            build_root, sconscript_env['tool_variant_dir'], working_folder)
        sconscript_env['build_dir'] = os.path.normpath(
            os.path.join(build_root, build_base_path, working_folder, ''))
        sconscript_env['abs_build_dir'] = os.path.abspath(
            sconscript_env['build_dir'])
        sconscript_env['build_tool_variant_dir'] = os.path.normpath(
            os.path.join(build_root, sconscript_env['tool_variant_dir'],
                         working_folder, ''))
        sconscript_env['offset_dir'] = sconstruct_offset_path
        sconscript_env['offset_tool_variant_dir'] = os.path.join(
            sconscript_env['offset_dir'], sconscript_env['tool_variant_dir'])
        sconscript_env['tool_variant_dir_offset'] = os.path.normpath(
            os.path.join(sconscript_env['tool_variant_dir'],
                         sconscript_env['offset_dir']))
        sconscript_env['flat_tool_variant_dir_offset'] = os.path.normpath(
            os.path.join(flatten_dir(sconscript_env['tool_variant_dir']),
                         sconscript_env['offset_dir']))
        sconscript_env['final_dir'] = '..' + os.path.sep + 'final' + os.path.sep
        sconscript_env['active_toolchain'] = toolchain

        def abs_final_dir(abs_build_dir, final_dir):
            return os.path.isabs(final_dir) and final_dir or os.path.normpath(
                os.path.join(abs_build_dir, final_dir))

        sconscript_env['abs_final_dir'] = abs_final_dir(
            sconscript_env['abs_build_dir'], sconscript_env['final_dir'])

        sconscript_env.AppendUnique(INCPATH=[sconscript_env['offset_dir']])

        sconscript_exports = {
            'env': sconscript_env,
            'sconscript_env': sconscript_env,
            'build_root': build_root,
            'build_dir': sconscript_env['build_dir'],
            'abs_build_dir': sconscript_env['abs_build_dir'],
            'final_dir': sconscript_env['final_dir'],
            'abs_final_dir': sconscript_env['abs_final_dir'],
            'common_variant_final_dir': '../../../common/final/',
            'common_project_final_dir': build_root + '/common/final/',
            'project': name,
        }

        self._configure.configure(sconscript_exports['env'])

        cuppa.modules.registration.init_env_for_variant("methods",
                                                        sconscript_exports)

        if sconscript_env['dump']:
            logger.info("{} {}".format(
                as_info_label("Dumping ENV for"),
                as_info(sconscript_exports['build_dir'])))
            dump = sconscript_env.Dump()
            logger.info("\n" + dump + "\n")
        else:
            SCons.Script.SConscript(
                [sconscript_file],
                variant_dir=sconscript_exports['build_dir'],
                duplicate=0,
                exports=sconscript_exports)
    else:
        logger.error(
            "Skipping non-existent project [{}] using [{},{},{}]".format(
                as_error(sconscript_file), as_error(toolchain.name()),
                as_error(variant), as_error(target_arch)))
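# Standalone illustration of the flatten_dir() helper defined above: it
# joins the normalised path components with "_" so a nested build base path
# can double as a single flat directory name. The input path is hypothetical.
import os

def flatten_dir(directory, join_char="_"):
    return join_char.join(os.path.normpath(directory).split(os.path.sep))

print flatten_dir("gcc/release/x86_64")   # gcc_release_x86_64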
def __init__(self, cuppa_env, location, develop=None, branch=None,
             extra_sub_path=None, name_hint=None):

    logger.debug(
        "Create location using location=[{}], develop=[{}], branch=[{}], extra_sub_path=[{}], name_hint=[{}]"
        .format(as_info(location), as_info(str(develop)),
                as_info(str(branch)), as_info(str(extra_sub_path)),
                as_info(str(name_hint))))

    location = self.replace_sconstruct_anchor(location, cuppa_env)

    if develop:
        if not os.path.isabs(develop):
            develop = '#' + develop
        develop = self.replace_sconstruct_anchor(develop, cuppa_env)
        logger.debug("Develop location specified [{}]".format(
            as_info(develop)))

    if 'develop' in cuppa_env and cuppa_env['develop'] and develop:
        location = develop
        logger.debug(
            "--develop specified so using location=develop=[{}]".format(
                as_info(develop)))

    self._location = os.path.expanduser(location)
    self._full_url = urlparse(self._location)
    self._sub_dir = None
    self._name_hint = name_hint

    self._expanded_location = None
    self._plain_location = ""

    if extra_sub_path:
        if os.path.isabs(extra_sub_path):
            raise LocationException(
                "Error extra sub path [{}] is not relative".format(
                    extra_sub_path))
        else:
            self._sub_dir = os.path.normpath(extra_sub_path)

    ## Get the location for the source dependency. If the location is a URL or an Archive we'll need to
    ## retrieve the URL and extract the archive. get_local_directory() returns the location of the source
    ## once this is done
    local_directory = self.get_local_directory(cuppa_env, self._location,
                                               self._sub_dir, branch,
                                               self._full_url)

    logger.trace("Local Directory for [{}] returned as [{}]".format(
        as_notice(self._location), as_notice(local_directory)))

    self._base_local_directory = local_directory
    self._local_directory = self._sub_dir and os.path.join(
        local_directory, self._sub_dir) or local_directory

    ## Now that we have a locally accessible version of the dependency we can try to collate some information
    ## about it to allow us to specify what we are building with.
    self._url, self._repository, self._branch, self._remote, self._revision = self.get_info(
        self._location, self._local_directory, self._full_url)
    self._version, self._revision = self.ver_rev_summary(
        self._branch, self._revision, self._full_url.path)

    logger.debug("Using [{}]{}{} at [{}] stored in [{}]".format(
        as_info(location),
        (self._branch and ":[{}]".format(as_info(str(self._branch))) or ""),
        (self._remote and " from [{}]".format(as_info(str(self._remote))) or ""),
        as_info(self._version),
        as_notice(self._local_directory)))
def get_boost_location( env, location, version, base, patched ):
    logger.debug( "Identify boost using location = [{}], version = [{}], base = [{}], patched = [{}]".format(
            as_info( str(location) ),
            as_info( str(version) ),
            as_info( str(base) ),
            as_info( str(patched) )
    ) )

    boost_home = None
    boost_location = None

    extra_sub_path = 'clean'
    if patched:
        extra_sub_path = 'patched'

    offline = env['offline']

    if location:
        location = _location_from_boost_version( location, offline )

        logger.trace( "Location after version detection = [{}]".format( as_notice( str(location) ) ) )

        if not location: # use version as a fallback in case both are specified
            location = _location_from_boost_version( version, offline )
        boost_location = cuppa.location.Location( env, location, extra_sub_path=extra_sub_path, name_hint="boost" )

    elif base: # Find boost locally
        if not os.path.isabs( base ):
            base = os.path.abspath( base )

        if not version:
            boost_home = base
        elif version:
            search_list = [
                os.path.join( base, 'boost', version, 'source' ),
                os.path.join( base, 'boost', 'boost_' + version ),
                os.path.join( base, 'boost', version ),
                os.path.join( base, 'boost_' + version ),
            ]

            def exists_in( locations ):
                for location in locations:
                    home = _home_from_path( location )
                    if home:
                        return home
                return None

            boost_home = exists_in( search_list )
            if not boost_home:
                raise BoostException( "Cannot construct Boost Object. Home for Version [{}] cannot be found. Searched in [{}]".format( version, str( [l for l in search_list] ) ) )
        else:
            raise BoostException( "Cannot construct Boost Object. No Home or Version specified" )

        logger.debug( "Using boost found at [{}]".format( as_info( boost_home ) ) )
        boost_location = cuppa.location.Location( env, boost_home, extra_sub_path=extra_sub_path )
    else:
        location = _location_from_boost_version( version, offline )
        boost_location = cuppa.location.Location( env, location, extra_sub_path=extra_sub_path )

    if patched:
        apply_patch_if_needed( boost_location.local(), get_boost_version( boost_location.local() )[0] )

    return boost_location
def __init__(self,
             sconstruct_path,
             base_path=os.path.abspath('.'),
             branch_root=None,
             default_options={},
             default_projects=[],
             default_variants=[],
             default_dependencies=[],
             default_profiles=[],
             dependencies=[],
             profiles=[],
             default_runner=None,
             configure_callback=None,
             tools=[]):

    cuppa.core.base_options.set_base_options()

    cuppa_env = cuppa.core.environment.CuppaEnvironment()
    cuppa_env.add_tools(tools)

    dependencies, default_dependencies, dependencies_warning = self._normalise_with_defaults(
        dependencies, default_dependencies, "dependencies")
    profiles, default_profiles, profiles_warning = self._normalise_with_defaults(
        profiles, default_profiles, "profiles")

    self.initialise_options(cuppa_env, default_options, profiles, dependencies)
    cuppa_env['configured_options'] = {}
    self._configure = cuppa.configure.Configure(cuppa_env,
                                                callback=configure_callback)

    enable_thirdparty_logging(
        cuppa_env.get_option('enable-thirdparty-logging') and True or False)

    self._set_verbosity_level(cuppa_env)

    cuppa_env['sconstruct_path'] = sconstruct_path
    cuppa_env['sconstruct_dir'], cuppa_env['sconstruct_file'] = os.path.split(
        sconstruct_path)

    self._set_output_format(cuppa_env)

    self._configure.load()

    cuppa_env['offline'] = cuppa_env.get_option('offline')

    cuppa.version.check_current_version(cuppa_env['offline'])

    if cuppa_env['offline']:
        logger.info(as_info_label("Running in OFFLINE mode"))

    logger.info("using sconstruct file [{}]".format(
        as_notice(cuppa_env['sconstruct_file'])))

    if dependencies_warning:
        logger.warn(dependencies_warning)

    if profiles_warning:
        logger.warn(profiles_warning)

    help = cuppa_env.get_option('help') and True or False

    cuppa_env['minimal_output'] = cuppa_env.get_option('minimal_output')
    cuppa_env['ignore_duplicates'] = cuppa_env.get_option('ignore_duplicates')

    cuppa_env['working_dir'] = os.getcwd()
    cuppa_env['launch_dir'] = os.path.relpath(SCons.Script.GetLaunchDir(),
                                              cuppa_env['working_dir'])
    cuppa_env['run_from_launch_dir'] = cuppa_env['launch_dir'] == "."

    cuppa_env['launch_offset_dir'] = "."

    if not cuppa_env['run_from_launch_dir']:
        levels = len(cuppa_env['launch_dir'].split(os.path.sep))
        cuppa_env['launch_offset_dir'] = os.path.sep.join(
            ['..' for i in range(levels)])

    cuppa_env['base_path'] = os.path.normpath(os.path.expanduser(base_path))
    cuppa_env['branch_root'] = branch_root and os.path.normpath(
        os.path.expanduser(branch_root)) or base_path
    cuppa_env['branch_dir'] = cuppa_env['branch_root'] and os.path.relpath(
        cuppa_env['base_path'], cuppa_env['branch_root']) or None

    thirdparty = cuppa_env.get_option('thirdparty')
    if thirdparty:
        thirdparty = os.path.normpath(os.path.expanduser(thirdparty))

    cuppa_env['thirdparty'] = thirdparty

    cuppa.core.storage_options.process_storage_options(cuppa_env)
    cuppa.core.location_options.process_location_options(cuppa_env)

    cuppa_env['current_branch'] = ''
    cuppa_env['current_revision'] = ''

    if not help and not self._configure.handle_conf_only():
        if cuppa_env['location_match_current_branch']:
            url, repo, branch, remote, rev = cuppa.scms.scms.get_current_rev_info(
                cuppa_env['sconstruct_dir'])
            if branch:
                cuppa_env['current_branch'] = branch
            if rev:
                cuppa_env['current_revision'] = rev
            logger.info(
                "Current build on branch [{}] at revision [{}] from remote [{}] in [{}] at [{}]"
                .format(as_info(str(branch)), as_info(str(rev)),
                        as_info(str(remote)), as_info(str(repo)),
                        as_info(str(url))))

    cuppa_env['default_projects'] = default_projects
    cuppa_env['default_variants'] = default_variants and set(
        default_variants) or set()
    cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
    cuppa_env['BUILD_WITH'] = cuppa_env['default_dependencies']
    cuppa_env['dependencies'] = {}
    cuppa_env['default_profiles'] = default_profiles and default_profiles or []
    cuppa_env['BUILD_PROFILE'] = cuppa_env['default_profiles']
    cuppa_env['profiles'] = {}

    test_runner = cuppa_env.get_option(
        'runner', default=default_runner and default_runner or 'process')
    cuppa_env['default_runner'] = test_runner

    cuppa_env['propagate_env'] = cuppa_env.get_option('propagate-env') and True or False
    cuppa_env['propagate_path'] = cuppa_env.get_option('propagate-path') and True or False
    cuppa_env['merge_path'] = cuppa_env.get_option('merge-path') and True or False
    cuppa_env['show_test_output'] = cuppa_env.get_option('show-test-output') and True or False
    cuppa_env['suppress_process_output'] = cuppa_env.get_option('suppress-process-output') and True or False
    cuppa_env['dump'] = cuppa_env.get_option('dump') and True or False
    cuppa_env['clean'] = cuppa_env.get_option('clean') and True or False

    self.add_variants(cuppa_env)
    self.add_toolchains(cuppa_env)
    self.add_platforms(cuppa_env)

    cuppa_env['platform'] = cuppa.build_platform.Platform.current()

    toolchains = cuppa_env.get_option('toolchains')
    cuppa_env['target_architectures'] = None

    if not help and not self._configure.handle_conf_only():
        default_toolchain = cuppa_env['platform'].default_toolchain()

        if not toolchains:
            toolchains = [cuppa_env[self.toolchains_key][default_toolchain]]
        else:
            toolchains = [
                cuppa_env[self.toolchains_key][t] for t in toolchains
            ]

        cuppa_env['active_toolchains'] = toolchains

        def add_profile(name, profile):
            cuppa_env['profiles'][name] = profile

        def add_dependency(name, dependency):
            cuppa_env['dependencies'][name] = dependency

        cuppa.modules.registration.get_options("methods", cuppa_env)

        if not help and not self._configure.handle_conf_only():
            cuppa_env[self.project_generators_key] = {}
            cuppa.modules.registration.add_to_env("dependencies", cuppa_env,
                                                  add_dependency)
            cuppa.modules.registration.add_to_env("profiles", cuppa_env,
                                                  add_profile)
            cuppa.modules.registration.add_to_env("methods", cuppa_env)
            cuppa.modules.registration.add_to_env("project_generators",
                                                  cuppa_env)

            for method_plugin in pkg_resources.iter_entry_points(
                    group='cuppa.method.plugins', name=None):
                method_plugin.load().add_to_env(cuppa_env)

            for profile_plugin in pkg_resources.iter_entry_points(
                    group='cuppa.profile.plugins', name=None):
                profile_plugin.load().add_to_env(cuppa_env)

            if profiles:
                for profile in profiles:
                    profile.add_to_env(cuppa_env, add_profile)

            logger.trace("available profiles are [{}]".format(
                colour_items(sorted(cuppa_env["profiles"].keys()))))

            logger.info("default profiles are [{}]".format(
                colour_items(sorted(cuppa_env["default_profiles"]),
                             as_info)))

            for dependency_plugin in pkg_resources.iter_entry_points(
                    group='cuppa.dependency.plugins', name=None):
                dependency_plugin.load().add_to_env(cuppa_env, add_dependency)

            if dependencies:
                for dependency in dependencies:
                    dependency.add_to_env(cuppa_env, add_dependency)

            logger.trace("available dependencies are [{}]".format(
                colour_items(sorted(cuppa_env["dependencies"].keys()))))

            logger.info("default dependencies are [{}]".format(
                colour_items(sorted(cuppa_env["default_dependencies"]),
                             as_info)))

        # TODO - default_profile

        if cuppa_env['dump']:
            logger.info(
                as_info_label(
                    "Running in DUMP mode, no building will be attempted"))
            cuppa_env.dump()

        job_count = cuppa_env.get_option('num_jobs')
        parallel = cuppa_env.get_option('parallel')
        parallel_mode = "manually"

        if job_count == 1 and parallel:
            job_count = multiprocessing.cpu_count()
            if job_count > 1:
                SCons.Script.SetOption('num_jobs', job_count)
                parallel_mode = "automatically"
        cuppa_env['job_count'] = job_count
        cuppa_env['parallel'] = parallel
        if job_count > 1:
            logger.info("Running in {} with option [{}] set {} as [{}]".format(
                as_emphasised("parallel mode"), as_info("jobs"),
                as_emphasised(parallel_mode),
                as_info(str(SCons.Script.GetOption('num_jobs')))))

    if not help and self._configure.handle_conf_only():
        self._configure.save()

    if not help and not self._configure.handle_conf_only():
        self.build(cuppa_env)

    if self._configure.handle_conf_only():
        print(
            "cuppa: Handling configuration only, so no builds will be attempted."
        )
        print(
            "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
        )
        print("")
        print("scons -D {}".format(
            self._command_line_from_settings(
                cuppa_env['configured_options'])))
        print("")
        print("cuppa: Nothing to be done. Exiting.")
        SCons.Script.Exit()
def __call__( self, target, source, env ):

    executable = str( source[0].abspath )
    working_dir, test = os.path.split( executable )
    if self._working_dir:
        working_dir = self._working_dir
    program_path = source[0].path
    suite = env['build_dir']

    if cuppa.build_platform.name() == "Windows":
        executable = '"' + executable + '"'

    test_command = executable
    if self._command:
        test_command = self._command
        working_dir = self._working_dir and self._working_dir or self._final_dir
        test = os.path.relpath( executable, working_dir )

    test_suite = TestSuite.create( suite, env )
    test_case = test_suite.enter_test( test, expected=self._expected )

    show_test_output = env['show_test_output']

    try:
        return_code = self._run_test( test_case, show_test_output, program_path, test_command, working_dir, env )

        if return_code == self._expected_exit_code:
            test_suite.exit_test( test_case, 'passed' )
        elif return_code < 0:
            self.__write_file_to_stderr( stderr_file_name_from( program_path ) )
            logger.error( "Test was terminated by signal: {}".format( as_error( str(return_code) ) ) )
            test_suite.exit_test( test_case, 'aborted' )
        elif return_code > 0:
            self.__write_file_to_stderr( stderr_file_name_from( program_path ) )
            logger.error( "Test returned with error code: {}".format( as_error( str(return_code) ) ) )
            test_suite.exit_test( test_case, 'failed' )
        else:
            test_suite.exit_test( test_case, 'passed' )

        cuppa.test_report.cuppa_json.write_report( report_file_name_from( program_path ), test_suite.tests() )

        if return_code == self._expected_exit_code:
            self._write_success_file( success_file_name_from( program_path ) )
        elif return_code:
            self._remove_success_file( success_file_name_from( program_path ) )
            if return_code < 0:
                raise BuildError( node=source[0], errstr="Test was terminated by signal: {}".format( str(-return_code) ) )
            else:
                raise BuildError( node=source[0], errstr="Test returned with error code: {}".format( str(return_code) ) )
        else:
            self._write_success_file( success_file_name_from( program_path ) )

        return None

    except OSError, e:
        logger.error( "Execution of [{}] failed with error: {}".format( as_notice(test_command), as_notice(str(e)) ) )
        raise BuildError( e )
def __call__(self, env, target, source, libraries, linktype):

    logger.trace("Build Dir = [{}]".format(as_info(env['build_dir'])))
    logger.trace("Requested libraries = [{}]".format(colour_items(libraries)))

    variant = variant_name(env['variant'].name())
    target_arch = env['target_arch']
    toolchain = env['toolchain']
    stage_dir = stage_directory(toolchain, variant, target_arch,
                                toolchain.abi_flag(env))
    variant_key = stage_dir

    logger.trace("Prebuilt Libraries Variant Key = [{}]".format(
        as_notice(variant_key)))

    library_action = BoostLibraryAction(env, stage_dir, libraries,
                                        self._add_dependents, linktype,
                                        self._boost, self._verbose_build,
                                        self._verbose_config)
    library_emitter = BoostLibraryEmitter(env, stage_dir, libraries,
                                          self._add_dependents, linktype,
                                          self._boost)

    logger.trace("Action Prebuilt Libraries for [{}] = {}".format(
        as_info(variant_key),
        colour_items(BoostLibraryAction.prebuilt_libraries[variant_key])))
    logger.trace("Emitter Prebuilt Libraries for [{}] = {}".format(
        as_info(variant_key),
        colour_items(BoostLibraryEmitter.prebuilt_libraries[variant_key])))

    env.AppendUnique(
        BUILDERS={
            'BoostLibraryBuilder':
            env.Builder(action=library_action, emitter=library_emitter)
        })

    built_libraries = env.BoostLibraryBuilder(target, source)

    built_libraries_map = {
        extract_library_name_from_path(l): l
        for l in built_libraries
    }

    logger.trace("Libraries to be built = [{}]".format(
        colour_items(built_libraries_map.keys())))

    if variant_key not in self._prebuilt_libraries:
        self._prebuilt_libraries[variant_key] = {}

    logger.trace("Variant sources = [{}]".format(
        colour_items(self._prebuilt_libraries[variant_key].keys())))

    required_libraries = add_dependent_libraries(self._boost, linktype,
                                                 libraries)

    logger.trace("Required libraries = [{}]".format(
        colour_items(required_libraries)))

    for library in required_libraries:
        if library in self._prebuilt_libraries[variant_key]:
            logger.trace("Library [{}] already present in variant [{}]".format(
                as_notice(library), as_info(variant_key)))
            #if library not in built_libraries_map:
            # The Depends is required regardless so SCons knows about the relationship
            logger.trace("Add Depends for [{}]".format(
                as_notice(
                    self._prebuilt_libraries[variant_key][library].path)))
            env.Depends(built_libraries,
                        self._prebuilt_libraries[variant_key][library])
        else:
            self._prebuilt_libraries[variant_key][library] = built_libraries_map[library]

    logger.trace("Library sources for variant [{}] = [{}]".format(
        as_info(variant_key),
        colour_items(
            k + ":" + as_info(v.path)
            for k, v in self._prebuilt_libraries[variant_key].iteritems())))

    bjam = env.Command(bjam_exe(self._boost), [], BuildBjam(self._boost))
    env.NoClean(bjam)

    if built_libraries:
        env.Requires(built_libraries, bjam)

        if cuppa.build_platform.name() == "Linux":
            toolset_target = os.path.join(self._boost.local(),
                                          env['toolchain'].name() + "._jam")
            toolset_config_jam = env.Command(toolset_target, [],
                                             WriteToolsetConfigJam())

            project_config_target = os.path.join(self._boost.local(),
                                                 "project-config.jam")
            if not os.path.exists(project_config_target):
                project_config_jam = env.Requires(
                    project_config_target,
                    env.AlwaysBuild(toolset_config_jam))
                env.Requires(built_libraries, project_config_jam)

            env.Requires(built_libraries, toolset_config_jam)

    install_dir = linktype == 'shared' and env['abs_final_dir'] or env['abs_build_dir']

    installed_libraries = []

    for library in required_libraries:

        logger.debug("Install Boost library [{}:{}] to [{}]".format(
            as_notice(library),
            as_info(str(self._prebuilt_libraries[variant_key][library])),
            as_notice(install_dir)))

        library_node = self._prebuilt_libraries[variant_key][library]

        logger.trace("Library Node = \n[{}]\n[{}]\n[{}]\n[{}]\n[{}]".format(
            as_notice(library_node.path), as_notice(str(library_node)),
            as_notice(str(library_node.get_binfo().bact)),
            as_notice(str(library_node.get_state())),
            as_notice(str(library_node.srcnode()))))

        installed_library = env.CopyFiles(
            install_dir, self._prebuilt_libraries[variant_key][library])

        installed_libraries.append(installed_library)

    logger.debug("Boost 'Installed' Libraries = [{}]".format(
        colour_items(l.path for l in Flatten(installed_libraries))))

    return Flatten(installed_libraries)
def update( self, env, project, toolchain, variant, build_root, working_dir, final_dir_offset ):
    logger.debug( "Update project [{}] for [{}, {}]".format(
        as_notice( project ), as_notice( toolchain ), as_notice( variant ) ) )
    logger.trace( "Update project [{}] working_dir [{}], final_dir [{}]".format(
        as_notice( project ), as_notice( working_dir ), as_notice( final_dir_offset ) ) )

    if project not in self._projects:

        title = os.path.splitext( project )[0]
        directory, filename = os.path.split( title )

        cbs_file_name = filename
        if cbs_file_name in [ 'sconscript', 'SConscript', 'Sconscript' ]:
            cbs_file_name = os.path.split( directory )[1]
            if cbs_file_name == ".":
                cbs_file_name = os.path.split( os.path.abspath( env['sconscript_dir'] ) )[1]
                if not cbs_file_name:
                    cbs_file_name = "sconscript"

        if not self._place_cbs_by_sconscript:
            directory = env['working_dir']

        directory = os.path.join( directory, "cbs" )
        project_file = directory + os.path.sep + cbs_file_name + ".cbp"

        execution_dir = ''
        if directory:
            execution_dir = os.path.relpath( os.getcwd(), directory )
            execution_dir = ( os.path.pardir
                              + os.path.sep
                              + os.path.join( execution_dir,
                                              os.path.split( os.path.abspath( os.getcwd() ) )[1] ) )

        self._projects[project] = {}
        self._projects[project]['title']         = title
        self._projects[project]['directory']     = directory
        self._projects[project]['path']          = os.path.join( os.getcwd(), directory )
        self._projects[project]['execution_dir'] = execution_dir
        self._projects[project]['project_file']  = project_file
        self._projects[project]['variants']      = set()
        self._projects[project]['toolchains']    = set()
        self._projects[project]['files']         = set()
        self._projects[project]['targets']       = {}
        self._projects[project]['lines_header']  = []
        self._projects[project]['lines_footer']  = []

    if not self._projects[project]['lines_header']:
        self._projects[project]['lines_header'] = self.create_header(
            self._projects[project]['title'],
            self._projects[project]['execution_dir'] )

    if not self._projects[project]['lines_footer']:
        self._projects[project]['lines_footer'] = self.create_footer()

    self._projects[project]['variants'].add( variant )
    self._projects[project]['toolchains'].add( toolchain )

    self._projects[project]['search_paths'] = set()
    self._projects[project]['sys_search_paths'] = set()

    for name in env['BUILD_WITH']:
        logger.trace( "Reading search paths for dependency [{}]".format( name ) )
        if name in env['dependencies']:
            dependency_factory = env['dependencies'][name]
            dependency = dependency_factory( env )
            if hasattr( dependency, "includes" ):
                for include in dependency.includes():
                    self._projects[project]['search_paths'].add( include )
                    logger.trace( "...adding search path [{}] for dependency [{}]".format( include, name ) )
            if hasattr( dependency, "sys_includes" ):
                for sys_include in dependency.sys_includes():
                    self._projects[project]['sys_search_paths'].add( sys_include )
                    logger.trace( "...adding search path [{}] for dependency [{}]".format( sys_include, name ) )

    self._projects[project]['search_paths']     = sorted( self._projects[project]['search_paths'] )
    self._projects[project]['sys_search_paths'] = sorted( self._projects[project]['sys_search_paths'] )

    self._projects[project]['extensions_block'] = self.create_extensions_block( project )

    working_dir_path = os.path.join( self._projects[project]['execution_dir'], working_dir )
    final_dir_path = os.path.normpath( os.path.join( working_dir_path, final_dir_offset ) )

    target = "{}-{}".format( toolchain, variant )
    test_actions = [ "", "--test" ]

    for action in test_actions:
        target_name = target + action
        if target_name not in self._projects[project]['targets']:
            self._projects[project]['targets'][target_name] = self.create_target(
                target_name, project, toolchain, variant, action, working_dir_path, final_dir_path )
@classmethod
def update_index(cls, json_report, destination):
    logger.trace("add destination = [{}]".format(as_notice(destination)))
    if destination not in cls.all_reports:
        cls.all_reports[destination] = []
    cls.all_reports[destination].append(json_report)
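# A minimal, self-contained sketch (hypothetical class name) of the accumulation
# pattern update_index relies on: a class-level dict keyed by destination, each
# entry holding the list of JSON reports gathered for that destination.
class ReportIndexSketch(object):
    all_reports = {}

    @classmethod
    def update_index(cls, json_report, destination):
        cls.all_reports.setdefault(destination, []).append(json_report)

# ReportIndexSketch.update_index("a.json", "/tmp/reports")
# ReportIndexSketch.update_index("b.json", "/tmp/reports")
# assert ReportIndexSketch.all_reports["/tmp/reports"] == ["a.json", "b.json"]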
def lazy_create_path( path ):
    if not os.path.exists( path ):
        try:
            os.makedirs( path )
        except os.error as e:
            if not os.path.exists( path ):
                logger.error( "Could not create path [{}]. Failed with error [{}]".format(
                    as_notice( path ), as_error( str(e) ) ) )
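# The re-check inside the exception handler above guards against the race where
# another process creates the path between the exists() test and makedirs().
# A standalone equivalent of that pattern using only the standard library:
import errno
import os

def ensure_path(path):
    try:
        os.makedirs(path)
    except OSError as e:
        # A concurrent creator getting there first is fine; anything else is real.
        if e.errno != errno.EEXIST:
            raise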
def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):

    local_directory = None

    base = cuppa_env['download_root']
    if not os.path.isabs( base ):
        base = os.path.join( cuppa_env['working_dir'], base )

    if location.startswith( 'file:' ):
        location = pip.download.url_to_path( location )

    if not pip.download.is_url( location ):

        if pip.download.is_archive_file( location ):
            local_folder = self.folder_name_from_path( location )
            local_directory = os.path.join( base, local_folder )
            if os.path.exists( local_directory ):
                try:
                    os.rmdir( local_directory )
                except:
                    return local_directory, False
            self.extract( location, local_directory )
        else:
            local_directory = branch and os.path.join( location, branch ) or location

        return local_directory, False

    else:

        local_folder = self.folder_name_from_path( full_url )
        local_directory = os.path.join( base, local_folder )

        if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):

            logger.debug( "[{}] is an archive download".format( as_info( location ) ) )

            local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

            # First we check to see if we already downloaded and extracted this archive before
            if os.path.exists( local_dir_with_sub_dir ):
                try:
                    # If not empty this will fail
                    os.rmdir( local_dir_with_sub_dir )
                except:
                    # Not empty so we'll return this as the local_directory
                    return local_directory, True

            # If not we then check to see if we cached the download
            cached_archive = self.get_cached_archive( cuppa_env['cache_root'], local_folder )
            if cached_archive:
                logger.debug( "Cached archive [{}] found for [{}]".format(
                    as_info( cached_archive ), as_info( location ) ) )
                self.extract( cached_archive, local_dir_with_sub_dir )
            else:
                logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                try:
                    report_hook = None
                    if logger.isEnabledFor( logging.INFO ):
                        report_hook = ReportDownloadProgress()
                    filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                    name, extension = os.path.splitext( filename )
                    logger.info( "[{}] successfully downloaded to [{}]".format(
                        as_info( location ), as_info( filename ) ) )
                    self.extract( filename, local_dir_with_sub_dir )
                    if cuppa_env['cache_root']:
                        cached_archive = os.path.join( cuppa_env['cache_root'], local_folder )
                        logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                        shutil.copyfile( filename, cached_archive )
                except urllib.ContentTooShortError as error:
                    logger.error( "Download of [{}] failed with error [{}]".format(
                        as_error( location ), as_error( str(error) ) ) )
                    raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

        elif '+' in full_url.scheme:

            vc_type = location.split('+', 1)[0]
            backend = pip.vcs.vcs.get_backend( vc_type )
            if backend:
                vcs_backend = backend( location )
                rev_options = self.get_rev_options( vc_type, vcs_backend )
                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

                if os.path.exists( local_directory ):
                    url, repository, branch, revision = self.get_info( location, local_dir_with_sub_dir, full_url )
                    version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                    logger.debug( "Updating [{}] in [{}]{} at [{}]".format(
                        as_info( location ),
                        as_notice( local_dir_with_sub_dir ),
                        ( rev_options and " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                        as_info( version ) ) )
                    try:
                        vcs_backend.update( local_dir_with_sub_dir, rev_options )
                        logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                    except pip.exceptions.InstallationError as error:
                        logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                            as_warning( location ),
                            as_warning( local_dir_with_sub_dir ),
                            ( rev_options and " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                            as_warning( str(error) ) ) )
                else:
                    action = "Cloning"
                    if vc_type == "svn":
                        action = "Checking out"
                    logger.info( "{} [{}] into [{}]".format(
                        action, as_info( location ), as_info( local_dir_with_sub_dir ) ) )
                    try:
                        vcs_backend.obtain( local_dir_with_sub_dir )
                        logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                    except pip.exceptions.InstallationError as error:
                        logger.error( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                            as_error( location ),
                            as_error( local_dir_with_sub_dir ),
                            ( rev_options and " to {}".format( as_error( str(rev_options) ) ) or "" ),
                            as_error( str( error ) ) ) )
                        raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

        return local_directory, True
def __call__(self, target, source, env):
    executable = str(source[0].abspath)
    working_dir = self._working_dir and self._working_dir or os.path.split(executable)[0]
    program_path = source[0].path
    notifier = Notify(env, env['show_test_output'])

    if cuppa.build_platform.name() == "Windows":
        executable = '"' + executable + '"'

    boost_version = None
    preprocess = self.default_preprocess
    argument_prefix = ""

    if 'boost' in env['dependencies']:
        boost_version = env['dependencies']['boost'](env).numeric_version()
        if env['dependencies']['boost'](env).patched_test():
            argument_prefix = "boost.test."

    test_command = executable + " --{0}log_format=hrf --{0}log_level=test_suite --{0}report_level=no".format(argument_prefix)

    if boost_version:
        if boost_version >= 1.67:
            preprocess = cuppa.utility.preprocess.AnsiEscape.strip
            test_command = executable + " --{0}log_format=HRF --{0}log_level=all --{0}report_level=no --{0}color_output=no".format(argument_prefix)
        elif boost_version >= 1.60:
            test_command = executable + " --{0}log_format=HRF --{0}log_level=test_suite --{0}report_level=no".format(argument_prefix)

    try:
        return_code, tests = self._run_test(program_path, test_command, working_dir,
                                            env['branch_root'], notifier, preprocess, env)

        cuppa.test_report.cuppa_json.write_report(report_file_name_from(program_path), tests)

        if return_code < 0:
            self._write_file_to_stderr(stderr_file_name_from(program_path))
            logger.error("Test was terminated by signal: {}".format(as_notice(str(-return_code))))
        elif return_code > 0:
            self._write_file_to_stderr(stderr_file_name_from(program_path))
            logger.error("Test returned with error code: {}".format(as_notice(str(return_code))))
        elif notifier.master_suite['status'] != 'passed':
            logger.error("Not all test suites passed")
            raise BuildError(node=source[0], errstr="Not all test suites passed")

        if return_code:
            self._remove_success_file(success_file_name_from(program_path))
            if return_code < 0:
                raise BuildError(node=source[0],
                                 errstr="Test was terminated by signal: {}".format(str(-return_code)))
            else:
                raise BuildError(node=source[0],
                                 errstr="Test returned with error code: {}".format(str(return_code)))
        else:
            self._write_success_file(success_file_name_from(program_path))

        return None
    except OSError as e:
        logger.error("Execution of [{}] failed with error: {}".format(
            as_notice(test_command), as_notice(str(e))))
        raise BuildError(e)
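# A hedged, standalone sketch (illustrative names, not cuppa's API) of the
# version-dependent command construction above: the Boost.Test flags change at
# the 1.60 and 1.67 boundaries, and a patched Boost.Test prefixes every flag.
def boost_test_command(executable, boost_version=None, patched=False):
    prefix = patched and "boost.test." or ""
    if boost_version and boost_version >= 1.67:
        flags = "--{0}log_format=HRF --{0}log_level=all --{0}report_level=no --{0}color_output=no"
    elif boost_version and boost_version >= 1.60:
        flags = "--{0}log_format=HRF --{0}log_level=test_suite --{0}report_level=no"
    else:
        flags = "--{0}log_format=hrf --{0}log_level=test_suite --{0}report_level=no"
    return executable + " " + flags.format(prefix)

# boost_test_command("./runner", 1.68)
#   -> './runner --log_format=HRF --log_level=all --report_level=no --color_output=no'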
@classmethod
def get_branch(cls, path):
    branch = None
    remote = None

    head_detached = False
    command = "{git} branch".format(git=cls.binary())
    branch_info = cls.execute_command(command, path)
    if branch_info:
        match = re.search(r'^[*] [(]HEAD detached ', branch_info)
        if match:
            head_detached = True

    if not head_detached:
        result = cls.execute_command("{git} status -sb".format(git=cls.binary()), path)
        if result:
            match = re.search(r'## (?P<branch>[^)]+)[.][.][.](?P<remote>[^)\n]+)', result)
            if match:
                branch = match.group("branch")
                remote = match.group("remote")

            match = re.search(r'## HEAD \(no branch\)', result)

            # Check if we are rebasing
            if match:
                command = "{git} branch".format(git=cls.binary())
                branch_info = cls.execute_command(command, path)
                if branch_info:
                    match = re.search(r'(no branch, rebasing (?P<branch>[^)]+))', branch_info)
                    if match:
                        branch = match.group("branch")
                        logger.warn(as_warning("Currently rebasing branch [{}]".format(branch)))

        return branch, remote

    else:
        result = cls.execute_command(
            "{git} show -s --pretty=\%d --decorate=full HEAD".format(git=cls.binary()), path)

        match = re.search(r'HEAD(?:(?:[^ ]* -> |[^,]*, )(?P<refs>[^)]+))?', result)

        if match and match.group("refs"):
            refs = [{"ref": r.strip(), "type": ""} for r in match.group("refs").split(',')]
            logger.trace("Refs (using show) for [{}] are [{}]".format(
                as_notice(path), colour_items((r["ref"] for r in refs))))
            if refs:
                for ref in refs:
                    if ref["ref"].startswith("refs/heads/"):
                        ref["ref"] = ref["ref"][len("refs/heads/"):]
                        ref["type"] = "L"
                    elif ref["ref"].startswith("refs/tags/"):
                        ref["ref"] = ref["ref"][len("refs/tags/"):]
                        ref["type"] = "T"
                    elif ref["ref"].startswith("tag: refs/tags/"):
                        ref["ref"] = ref["ref"][len("tag: refs/tags/"):]
                        ref["type"] = "T"
                    elif ref["ref"].startswith("refs/remotes/"):
                        ref["ref"] = ref["ref"][len("refs/remotes/"):]
                        ref["type"] = "R"
                    else:
                        ref["type"] = "U"

                logger.trace("Refs (after classification) for [{}] are [{}]".format(
                    as_notice(path),
                    colour_items((":".join([r["type"], r["ref"]]) for r in refs))))

                if refs[0]["type"] == "L":
                    branch = refs[0]["ref"]
                #elif refs[0]["type"] == "T":
                    #branch = refs[0]["ref"]
                elif refs[0]["type"] == "R":
                    branch = refs[0]["ref"].split('/')[1]

                remote = next((ref["ref"] for ref in refs if ref["type"] == "R"), None)

            logger.trace("Branch (using show) for [{}] is [{}]".format(
                as_notice(path), as_info(str(branch))))
        else:
            if result == "(HEAD)":
                command = "{git} branch".format(git=cls.binary())
                branch_info = cls.execute_command(command, path)
                if branch_info:
                    match = re.search(r'(no branch, rebasing (?P<branch>[^)]+))', branch_info)
                    if match:
                        branch = match.group("branch")
                        logger.warn(as_warning("Currently rebasing branch [{}]".format(branch)))

    #if not branch:
        #logger.warn( as_warning( "No branch found from [{}]".format( result ) ) )

    return branch, remote
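# A runnable check (sample strings, not captured git output) of the regexes
# get_branch uses to pull branch and remote out of `git status -sb`:
import re

status_line = "## master...origin/master"
match = re.search(r'## (?P<branch>[^)]+)[.][.][.](?P<remote>[^)\n]+)', status_line)
assert match.group("branch") == "master"
assert match.group("remote") == "origin/master"

rebase_line = "* (no branch, rebasing feature/foo)"
match = re.search(r'no branch, rebasing (?P<branch>[^)]+)', rebase_line)
assert match.group("branch") == "feature/foo"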
def _update_conf(self):
    logger.info("{}".format(as_notice("Updating current settings...")))
    self._save_settings()
    logger.info("{}".format(as_notice("Update complete")))
def create_build_envs(self, toolchain, cuppa_env):

    propagate_environment = cuppa_env['propagate_env']
    propagate_path = cuppa_env['propagate_path']
    merge_path = cuppa_env['merge_path']

    variants = cuppa_env[self.variants_key]
    actions = cuppa_env[self.actions_key]

    target_architectures = cuppa_env['target_architectures']
    if not target_architectures:
        target_architectures = [None]

    def get_active_from_options(tasks):
        active_tasks = {}
        for key, task in tasks.items():
            if cuppa_env.get_option(task.name()):
                active_tasks[task.name()] = task
        return active_tasks

    active_variants = get_active_from_options(variants)
    active_actions = get_active_from_options(actions)

    def get_active_from_defaults(default_tasks, tasks):
        active_tasks = {}
        for task in default_tasks:
            if task in tasks.keys():
                active_tasks[task] = tasks[task]
        return active_tasks

    if not active_variants and not active_actions:
        default_variants = cuppa_env['default_variants'] or toolchain.default_variants()
        if default_variants:
            active_variants = get_active_from_defaults(default_variants, variants)
            active_actions = get_active_from_defaults(default_variants, actions)
            if active_variants:
                logger.info("Default build variants of [{}] being used.".format(
                    colour_items(active_variants, as_info)))
            if active_actions:
                logger.info("Default build actions of [{}] being used.".format(
                    colour_items(active_actions, as_info)))

    if not active_variants:
        active_variants = get_active_from_defaults(toolchain.default_variants(), variants)
        logger.info("No active variants specified so toolchain defaults of [{}] being used.".format(
            colour_items(active_variants, as_info)))

    logger.debug("Using active_variants = [{}]".format(colour_items(active_variants, as_info)))
    logger.debug("Using active_actions = [{}]".format(colour_items(active_actions, as_info)))

    build_envs = []

    for key, variant in active_variants.items():
        for target_arch in target_architectures:

            env, target_arch = toolchain.make_env(cuppa_env, variant, target_arch)

            if env:

                # TODO: Refactor this code out
                if propagate_environment or propagate_path or merge_path:

                    def merge_paths(default_paths, env_paths):
                        path_set = set(default_paths + env_paths)

                        def record_path(path):
                            path_set.discard(path)
                            return path

                        return [record_path(p) for p in default_paths + env_paths if p in path_set]

                    def get_paths_from(environment):
                        return 'PATH' in environment and environment['PATH'].split(os.pathsep) or []

                    default_paths = get_paths_from(env['ENV'])
                    env_paths = get_paths_from(os.environ)

                    if propagate_environment:
                        env['ENV'] = os.environ.copy()
                        logger.debug("propagating environment for [{}:{}] to all subprocesses: [{}]".format(
                            variant.name(), target_arch, as_notice(str(env['ENV']))))

                    if propagate_path and not propagate_environment:
                        env['ENV']['PATH'] = env_paths
                        logger.debug("propagating PATH for [{}:{}] to all subprocesses: [{}]".format(
                            variant.name(), target_arch, colour_items(env_paths)))
                    elif merge_path:
                        merged_paths = merge_paths(default_paths, env_paths)
                        env['ENV']['PATH'] = os.pathsep.join(merged_paths)
                        logger.debug("merging PATH for [{}:{}] to all subprocesses: [{}]".format(
                            variant.name(), target_arch, colour_items(merged_paths)))

                build_envs.append({
                    'variant': key,
                    'target_arch': target_arch,
                    'abi': toolchain.abi(env),
                    'env': env
                })

                if not cuppa_env['raw_output']:
                    cuppa.output_processor.Processor.install(env)

                env['toolchain'] = toolchain
                env['variant'] = variant
                env['target_arch'] = target_arch
                env['abi'] = toolchain.abi(env)
                env['variant_actions'] = self.get_active_actions(
                    cuppa_env, variant, active_variants, active_actions)

    return build_envs
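# The nested merge_paths helper above is an order-preserving de-duplication:
# default paths keep their positions and later duplicates from the OS
# environment are dropped. A standalone equivalent:
def merge_paths_sketch(default_paths, env_paths):
    seen = set()
    merged = []
    for p in default_paths + env_paths:
        if p not in seen:
            seen.add(p)
            merged.append(p)
    return merged

# merge_paths_sketch(["/usr/bin", "/opt/bin"], ["/usr/bin", "/home/me/bin"])
#   -> ["/usr/bin", "/opt/bin", "/home/me/bin"]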
def __call__(self, target, source, env):

    command = None
    working_dir = None
    program_path = None

    if self._command and callable(self._command):
        program_path = os.path.splitext(os.path.splitext(str(target[0]))[0])[0]
        monitor = Monitor(program_path, env)
        monitor.start()

        result = self._command(target, source, env)
        if result == 0 or result is None:
            self._write_success_file(success_file_name_from(program_path))
            monitor.stop('success')
        else:
            self._remove_success_file(success_file_name_from(program_path))
            monitor.stop('failed')
    else:
        if self._command:
            command = self._command
            if self._format_args:
                format_args = {}
                for key, value in self._format_args.iteritems():
                    format_args[key] = callable(value) and value() or value
                command = command.format(**format_args)
            working_dir = self._working_dir and self._working_dir or self._final_dir
            program_path = os.path.splitext(os.path.splitext(str(target[0]))[0])[0]
        else:
            executable = str(source[0].abspath)
            working_dir, test = os.path.split(executable)
            if self._working_dir:
                working_dir = self._working_dir
            program_path = source[0].path

            if cuppa.build_platform.name() == "Windows":
                executable = '"' + executable + '"'

            test_command = executable
            if self._command:
                command = self._command
                working_dir = self._working_dir and self._working_dir or self._final_dir

        suppress_output = env['suppress_process_output']

        retry_count = self._retry_count

        while retry_count >= 0:
            retry = (retry_count > 0)
            success = self._run_command(source, suppress_output, program_path,
                                        command, working_dir, env, retry)
            if not success and retry:
                logger.info("Retrying [{}]...".format(as_notice(command)))
            else:
                break
            retry_count = retry_count - 1

    return None
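# A standalone sketch of the retry loop above: run the command up to
# retry_count + 1 times, stopping at the first success (illustrative only).
def run_with_retries(run_once, retry_count):
    while retry_count >= 0:
        retry = retry_count > 0
        success = run_once()
        if success or not retry:
            return success
        retry_count -= 1

# run_with_retries(flaky_command, retry_count=2) makes at most three attempts.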
def get_local_directory(self, cuppa_env, location, sub_dir, branch, full_url):

    offline = cuppa_env['offline']
    local_directory = None

    base = cuppa_env['download_root']
    if not os.path.isabs(base):
        base = os.path.join(cuppa_env['working_dir'], base)

    if location.startswith('file:'):
        location = pip_download.url_to_path(location)

    if not pip_is_url(location):

        if pip_is_archive_file(location):

            self._local_folder = self.folder_name_from_path(location, cuppa_env)
            local_directory = os.path.join(base, self._local_folder)

            local_dir_with_sub_dir = os.path.join(local_directory, sub_dir and sub_dir or "")

            if os.path.exists(local_dir_with_sub_dir):
                try:
                    os.rmdir(local_dir_with_sub_dir)
                except:
                    return local_directory

            self.extract(location, local_dir_with_sub_dir)
            logger.debug("(local archive) Location = [{}]".format(as_info(location)))
            logger.debug("(local archive) Local folder = [{}]".format(as_info(self._local_folder)))
        else:
            local_directory = branch and os.path.join(location, branch) or location
            self._local_folder = self.folder_name_from_path(location, cuppa_env)

            logger.debug("(local file) Location = [{}]".format(as_info(location)))
            logger.debug("(local file) Local folder = [{}]".format(as_info(self._local_folder)))

        return local_directory

    else:

        self._local_folder = self.folder_name_from_path(full_url, cuppa_env)
        local_directory = os.path.join(base, self._local_folder)

        if full_url.scheme.startswith('http') and self.url_is_download_archive_url(full_url.path):

            logger.debug("[{}] is an archive download".format(as_info(location)))

            local_dir_with_sub_dir = os.path.join(local_directory, sub_dir and sub_dir or "")

            # First we check to see if we already downloaded and extracted this archive before
            if os.path.exists(local_dir_with_sub_dir):
                try:
                    # If not empty this will fail
                    os.rmdir(local_dir_with_sub_dir)
                except:
                    # Not empty so we'll return this as the local_directory
                    logger.debug("(already present) Location = [{}]".format(as_info(location)))
                    logger.debug("(already present) Local folder = [{}]".format(
                        as_info(str(self._local_folder))))
                    return local_directory

            if cuppa_env['dump'] or cuppa_env['clean']:
                return local_directory

            # If not we then check to see if we cached the download
            cached_archive = self.get_cached_archive(cuppa_env['cache_root'], self._local_folder)
            if cached_archive:
                logger.debug("Cached archive [{}] found for [{}]".format(
                    as_info(cached_archive), as_info(location)))
                self.extract(cached_archive, local_dir_with_sub_dir)
            else:
                logger.info("Downloading [{}]...".format(as_info(location)))
                try:
                    report_hook = None
                    if logger.isEnabledFor(logging.INFO):
                        report_hook = ReportDownloadProgress()
                    filename, headers = urlretrieve(location, reporthook=report_hook)
                    name, extension = os.path.splitext(filename)
                    logger.info("[{}] successfully downloaded to [{}]".format(
                        as_info(location), as_info(filename)))
                    self.extract(filename, local_dir_with_sub_dir)
                    if cuppa_env['cache_root']:
                        cached_archive = os.path.join(cuppa_env['cache_root'], self._local_folder)
                        logger.debug("Caching downloaded file as [{}]".format(as_info(cached_archive)))
                        shutil.copyfile(filename, cached_archive)
                except ContentTooShortError as error:
                    logger.error("Download of [{}] failed with error [{}]".format(
                        as_error(location), as_error(str(error))))
                    raise LocationException(error)

        elif '+' in full_url.scheme:

            vc_type = location.split('+', 1)[0]
            backend = pip_vcs.vcs.get_backend(vc_type)
            if backend:
                try:
                    vcs_backend = backend(self.expand_secret(location))
                except:
                    # Pip version >= 19
                    backend.url = self.expand_secret(location)
                    vcs_backend = backend

                local_dir_with_sub_dir = os.path.join(local_directory, sub_dir and sub_dir or "")

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                if os.path.exists(local_directory):
                    url, repository, branch, remote, revision = self.get_info(
                        location, local_dir_with_sub_dir, full_url, vc_type)
                    rev_options = self.get_rev_options(vc_type, vcs_backend, local_remote=remote)
                    version = self.ver_rev_summary(branch, revision, self._full_url.path)[0]
                    if not offline:
                        logger.info("Updating [{}] in [{}]{} at [{}]".format(
                            as_info(location),
                            as_notice(local_dir_with_sub_dir),
                            (rev_options and " on {}".format(as_notice(str(rev_options))) or ""),
                            as_info(version)))
                        try:
                            update(vcs_backend, local_dir_with_sub_dir, rev_options)
                            logger.debug("Successfully updated [{}]".format(as_info(location)))
                        except pip_exceptions.PipError as error:
                            logger.warn("Could not update [{}] in [{}]{} due to error [{}]".format(
                                as_warning(location),
                                as_warning(local_dir_with_sub_dir),
                                (rev_options and " at {}".format(as_warning(str(rev_options))) or ""),
                                as_warning(str(error))))
                    else:
                        logger.debug("Skipping update for [{}] as running in offline mode".format(
                            as_info(location)))
                else:
                    rev_options = self.get_rev_options(vc_type, vcs_backend)
                    action = "Cloning"
                    if vc_type == "svn":
                        action = "Checking out"
                    max_attempts = 2
                    attempt = 1
                    while attempt <= max_attempts:
                        logger.info("{} [{}] into [{}]{}".format(
                            action,
                            as_info(location),
                            as_info(local_dir_with_sub_dir),
                            attempt > 1 and "(attempt {})".format(str(attempt)) or ""))
                        try:
                            obtain(vcs_backend, local_dir_with_sub_dir, vcs_backend.url)
                            logger.debug("Successfully retrieved [{}]".format(as_info(location)))
                            break
                        except pip_exceptions.PipError as error:
                            attempt = attempt + 1
                            log_as = logger.warn
                            if attempt > max_attempts:
                                log_as = logger.error

                            log_as("Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                as_info(location),
                                as_notice(local_dir_with_sub_dir),
                                (rev_options and " to {}".format(as_notice(str(rev_options))) or ""),
                                as_error(str(error))))

                            if attempt > max_attempts:
                                raise LocationException(str(error))

        logger.debug("(url path) Location = [{}]".format(as_info(location)))
        logger.debug("(url path) Local folder = [{}]".format(as_info(self._local_folder)))

        return local_directory
def get_boost_location(env, location, version, base, patched):
    logger.debug("Identify boost using location = [{}], version = [{}], base = [{}], patched = [{}]".format(
        as_info(str(location)),
        as_info(str(version)),
        as_info(str(base)),
        as_info(str(patched))))

    boost_home = None
    boost_location = None

    extra_sub_path = 'clean'
    if patched:
        extra_sub_path = 'patched'

    offline = env['offline']

    if location:
        location = _location_from_boost_version(location, offline)

        logger.trace("Location after version detection = [{}]".format(as_notice(str(location))))

        if not location:  # use version as a fallback in case both are specified
            location = _location_from_boost_version(version, offline)
        boost_location = cuppa.location.Location(env, location,
                                                 extra_sub_path=extra_sub_path,
                                                 name_hint="boost")

    elif base:  # Find boost locally
        if not os.path.isabs(base):
            base = os.path.abspath(base)

        if not version:
            boost_home = base
        elif version:
            search_list = [
                os.path.join(base, 'boost', version, 'source'),
                os.path.join(base, 'boost', 'boost_' + version),
                os.path.join(base, 'boost', version),
                os.path.join(base, 'boost_' + version),
            ]

            def exists_in(locations):
                for location in locations:
                    home = _home_from_path(location)
                    if home:
                        return home
                return None

            boost_home = exists_in(search_list)
            if not boost_home:
                raise BoostException(
                    "Cannot construct Boost Object. Home for Version [{}] cannot be found. Searched in [{}]".format(
                        version, str([l for l in search_list])))
        else:
            raise BoostException("Cannot construct Boost Object. No Home or Version specified")

        logger.debug("Using boost found at [{}]".format(as_info(boost_home)))
        boost_location = cuppa.location.Location(env, boost_home, extra_sub_path=extra_sub_path)
    else:
        location = _location_from_boost_version(version, offline)
        boost_location = cuppa.location.Location(env, location, extra_sub_path=extra_sub_path)

    if patched:
        apply_patch_if_needed(boost_location.local(),
                              get_boost_version(boost_location.local())[0])

    return boost_location
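# A standalone sketch of the local search performed above: probe a list of
# candidate directories for a Boost distribution and return the first hit.
# Here the _home_from_path check is stubbed as a simple directory test.
import os

def first_existing(candidates):
    for candidate in candidates:
        if os.path.isdir(candidate):
            return candidate
    return None

# first_existing([
#     os.path.join(base, 'boost', version, 'source'),
#     os.path.join(base, 'boost_' + version),
# ])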
@classmethod
def add_to_env( cls, env, add_toolchain, add_to_supported ):
    for version in cls.supported_versions():
        add_to_supported( version )

    for version, gcc in cls.available_versions().iteritems():
        logger.debug( "Adding toolchain [{}] reported as [{}] with cxx_version [g++{}] at [{}]".format(
            as_info(version), as_info(gcc['version']), as_info(gcc['cxx_version']), as_notice(gcc['path']) ) )
        add_toolchain( version, cls( version, gcc['cxx_version'], gcc['version'], gcc['path'] ) )
def update(self, env, project, toolchain, variant, build_root, working_dir, final_dir_offset):
    logger.debug("Update project [{}] for [{}, {}]".format(
        as_notice(project), as_notice(toolchain), as_notice(variant)))
    logger.trace("Update project [{}] working_dir [{}], final_dir [{}]".format(
        as_notice(project), as_notice(working_dir), as_notice(final_dir_offset)))

    if project not in self._projects:

        title = os.path.splitext(project)[0]
        directory, filename = os.path.split(title)

        cbs_file_name = filename
        if cbs_file_name in ['sconscript', 'SConscript', 'Sconscript']:
            cbs_file_name = os.path.split(directory)[1]
            if cbs_file_name == ".":
                cbs_file_name = os.path.split(os.path.abspath(env['sconscript_dir']))[1]
                if not cbs_file_name:
                    cbs_file_name = "sconscript"

        if not self._place_cbs_by_sconscript:
            directory = env['working_dir']

        directory = os.path.join(directory, "cbs")
        project_file = directory + os.path.sep + cbs_file_name + ".cbp"

        execution_dir = ''
        if directory:
            execution_dir = os.path.relpath(os.getcwd(), directory)
            execution_dir = (os.path.pardir + os.path.sep + os.path.join(
                execution_dir, os.path.split(os.path.abspath(os.getcwd()))[1]))

        self._projects[project] = {}
        self._projects[project]['title'] = title
        self._projects[project]['directory'] = directory
        self._projects[project]['path'] = os.path.join(os.getcwd(), directory)
        self._projects[project]['execution_dir'] = execution_dir
        self._projects[project]['project_file'] = project_file
        self._projects[project]['variants'] = set()
        self._projects[project]['toolchains'] = set()
        self._projects[project]['files'] = set()
        self._projects[project]['targets'] = {}
        self._projects[project]['lines_header'] = []
        self._projects[project]['lines_footer'] = []

    if not self._projects[project]['lines_header']:
        self._projects[project]['lines_header'] = self.create_header(
            self._projects[project]['title'], self._projects[project]['execution_dir'])

    if not self._projects[project]['lines_footer']:
        self._projects[project]['lines_footer'] = self.create_footer()

    self._projects[project]['variants'].add(variant)
    self._projects[project]['toolchains'].add(toolchain)

    working_dir_path = os.path.join(self._projects[project]['execution_dir'], working_dir)
    final_dir_path = os.path.normpath(os.path.join(working_dir_path, final_dir_offset))

    target = "{}-{}".format(toolchain, variant)
    test_actions = ["", "--test"]

    for action in test_actions:
        target_name = target + action
        if target_name not in self._projects[project]['targets']:
            self._projects[project]['targets'][target_name] = self.create_target(
                target_name, project, toolchain, variant, action, working_dir_path, final_dir_path)
@classmethod
def Popen2( cls, stdout_processor, stderr_processor, args_list, **kwargs ):
    kwargs['stdout'] = subprocess.PIPE
    kwargs['stderr'] = subprocess.PIPE

    timing_enabled = logger.isEnabledFor( logging.DEBUG )

    suppress_output = False
    if 'suppress_output' in kwargs:
        suppress_output = kwargs['suppress_output']
        del kwargs['suppress_output']

    use_shell = False
    if 'scons_env' in kwargs:
        use_shell = kwargs['scons_env'].get_option( 'use-shell' )
        del kwargs['scons_env']

    # Initialise before the try so the except block can safely reference these
    # even if an error occurs before they are assigned
    process = None
    stderr_thread = None
    timer = None

    try:
        timer = timing_enabled and cuppa.timer.Timer() or None
        if timer:
            logger.debug( "Command [{}] - Running...".format( as_notice(str(timer.timer_id())) ) )

        close_fds = platform.system() == "Windows" and False or True

        if not suppress_output:
            sys.stdout.write( " ".join(args_list) + "\n" )

        process = subprocess.Popen(
            use_shell and " ".join(args_list) or args_list,
            **dict( kwargs, close_fds=close_fds, shell=use_shell, universal_newlines=True )
        )

        stderr_consumer = LineConsumer( process.stderr.readline, stderr_processor )
        stdout_consumer = LineConsumer( process.stdout.readline, stdout_processor )

        stderr_thread = threading.Thread( target=stderr_consumer )
        stderr_thread.start()
        stdout_consumer()
        stderr_thread.join()

        process.wait()

        if timer:
            timer.stop()
            logger.debug( "Command [{}] - Elapsed {}".format(
                as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )

        return process.returncode

    except Exception as e:
        if timer:
            timer.stop()
            logger.debug( "Command [{}] - Elapsed {}".format(
                as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )
        logger.error( "IncrementalSubProcess.Popen2() failed with error [{}]".format( str(e) ) )
        if process:
            logger.info( "Killing existing POpen object" )
            process.kill()
        if stderr_thread:
            logger.info( "Joining any running threads" )
            stderr_thread.join()
        raise e
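# A minimal standalone version of the pattern Popen2 implements: drain stderr
# on a helper thread while the calling thread drains stdout, so neither pipe
# fills up and deadlocks the child process (illustrative, not cuppa's API).
import subprocess
import threading

def run_and_process(args, on_stdout, on_stderr):
    process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               universal_newlines=True)

    def consume(stream, handler):
        for line in iter(stream.readline, ''):
            handler(line.rstrip('\n'))

    stderr_thread = threading.Thread(target=consume, args=(process.stderr, on_stderr))
    stderr_thread.start()
    consume(process.stdout, on_stdout)
    stderr_thread.join()
    return process.wait()

# return_code = run_and_process(["echo", "hello"], handle_out_line, handle_err_line)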
@classmethod
def on_progress(cls, progress, sconscript, variant, env, target, source):
    if progress == 'sconstruct_end':

        logger.trace("Destination dirs = [{}]".format(colour_items(cls.destination_dirs.keys())))
        logger.trace("cls.all_reports dirs = [{}]".format(colour_items(cls.all_reports.keys())))

        for destination_dir, final_dirs in six.iteritems(cls.destination_dirs):

            master_index_path = os.path.join(destination_dir, "test-report-index.html")
            master_report_path = os.path.join(destination_dir, "test-report-index.json")

            logger.debug("Master test report index path = [{}]".format(as_notice(master_index_path)))

            template = cls.get_template()

            summaries = {}
            summaries['vcs_info'] = initialise_test_linking(env, link_style="raw")
            url, repository, branch, remote, revision = summaries['vcs_info']
            summaries['name'] = str(env.Dir(destination_dir)) + "/*"
            summaries['title'] = url and url or env['sconstruct_dir']
            summaries['branch'] = branch and branch or "None"
            summaries['commit'] = remote and remote or "None"
            summaries['uri'] = url and url or "Local"
            summaries['toolchain_variants'] = {}
            summaries['reports'] = {}

            for report_dir, json_reports in six.iteritems(cls.all_reports):
                common, tail1, tail2 = cuppa.path.split_common(report_dir, destination_dir)
                logger.trace("common, tail1, tail2 = {}, {}, {}".format(
                    as_info(common), as_notice(tail1), as_notice(tail2)))
                if common and (not tail1 or not tail2):

                    for json_report in json_reports:

                        summary = CollateReportIndexAction._read(str(json_report))

                        toolchain_variant = summary['toolchain_variant_dir']

                        cls._update_toolchain_variant_summary(summaries, toolchain_variant, summary)

                        summary_name = summary['name']

                        if summary_name not in summaries['reports']:
                            summaries['reports'][summary_name] = {}
                            summaries['reports'][summary_name]['variants'] = {}

                        summaries['reports'][summary_name]['variants'][toolchain_variant] = summary

            report_list = summaries['reports'].items()
            report_list = sorted(report_list)

            for name, report in report_list:
                report['default_variant'] = None
                report['default_summary_rel_path'] = None
                variant_count = 0
                status_rank = 0
                for variant in six.itervalues(report['variants']):
                    variant_count += 1
                    index = cls._ranked_status().index(variant['status'])
                    if index > status_rank:
                        status_rank = index
                    if not report['default_variant']:
                        report['default_variant'] = variant['toolchain_variant_dir']
                        report['default_summary_rel_path'] = variant['summary_rel_path']

                report['variant_count'] = variant_count
                report['status'] = cls._ranked_status()[status_rank]
                report['selector'] = GenerateHtmlReportBuilder._selector_from_name(name)
                report['style'] = GenerateHtmlReportBuilder._status_bootstrap_style(report['status'])
                report['text_colour'] = GenerateHtmlReportBuilder._status_bootstrap_text_colour(report['status'])

            summaries_json_report = json.dumps(summaries, sort_keys=True, indent=4,
                                               separators=(',', ': '))

            logger.trace("summaries = \n{}".format(summaries_json_report))

            with open(master_report_path, 'w') as master_report_file:
                master_report_file.write(summaries_json_report)

            templateRendered = template.render(summaries=summaries, report_list=report_list,
                                               next=next, len=len)

            with open(master_index_path, 'w') as master_index_file:
                master_index_file.write(encode(templateRendered))
def _print_setting(self, action, key, value):
    logger.info("{} [{}] = [{}]".format(action, as_notice(key), as_notice(str(value))))
def __init__( self,
              base_path            = os.path.abspath( '.' ),
              branch_root          = None,
              default_options      = {},
              default_projects     = [],
              default_variants     = [],
              default_dependencies = [],
              default_profiles     = [],
              default_runner       = None,
              configure_callback   = None,
              dependencies         = {},
              tools                = [] ):

    cuppa.version.check_current_version()

    set_base_options()
    initialise_logging()

    cuppa_env = CuppaEnvironment()
    cuppa_env.add_tools( tools )

    self.initialise_options( cuppa_env, default_options, dependencies )
    cuppa_env['configured_options'] = {}
    self._configure = cuppa.configure.Configure( cuppa_env, callback=configure_callback )

    verbosity = cuppa_env.get_option( 'verbosity' )
    if verbosity:
        set_logging_level( verbosity )

    cuppa_env['sconstruct_file'] = cuppa_env.get_option( 'file' )

    if not cuppa_env['sconstruct_file']:
        for path in [ 'SConstruct', 'Sconstruct', 'sconstruct' ]:
            if os.path.exists( path ):
                cuppa_env['sconstruct_file'] = path

    cuppa_env['raw_output']      = cuppa_env.get_option( 'raw_output' ) and True or False
    cuppa_env['standard_output'] = cuppa_env.get_option( 'standard_output' ) and True or False

    if not cuppa_env['raw_output'] and not cuppa_env['standard_output']:
        cuppa_env.colouriser().enable()

    reset_logging_format()

    logger.info( "using sconstruct file [{}]".format( as_notice( cuppa_env['sconstruct_file'] ) ) )

    help = cuppa_env.get_option( 'help' ) and True or False

    self._configure.load()

    cuppa_env['minimal_output']    = cuppa_env.get_option( 'minimal_output' )
    cuppa_env['ignore_duplicates'] = cuppa_env.get_option( 'ignore_duplicates' )

    cuppa_env['working_dir']         = os.getcwd()
    cuppa_env['launch_dir']          = os.path.relpath( SCons.Script.GetLaunchDir(), cuppa_env['working_dir'] )
    cuppa_env['run_from_launch_dir'] = cuppa_env['launch_dir'] == "."

    cuppa_env['launch_offset_dir'] = "."

    if not cuppa_env['run_from_launch_dir']:
        levels = len( cuppa_env['launch_dir'].split( os.path.sep ) )
        cuppa_env['launch_offset_dir'] = os.path.sep.join( [ '..' for i in range(levels) ] )

    cuppa_env['base_path']   = os.path.normpath( os.path.expanduser( base_path ) )
    cuppa_env['branch_root'] = branch_root and os.path.normpath( os.path.expanduser( branch_root ) ) or base_path
    cuppa_env['branch_dir']  = cuppa_env['branch_root'] and os.path.relpath( cuppa_env['base_path'], cuppa_env['branch_root'] ) or None

    thirdparty = cuppa_env.get_option( 'thirdparty' )
    if thirdparty:
        thirdparty = os.path.normpath( os.path.expanduser( thirdparty ) )

    cuppa_env['thirdparty'] = thirdparty

    build_root = cuppa_env.get_option( 'build_root', default='_build' )
    cuppa_env['build_root'] = os.path.normpath( os.path.expanduser( build_root ) )

    download_root = cuppa_env.get_option( 'download_root', default='_cuppa' )
    cuppa_env['download_root'] = os.path.normpath( os.path.expanduser( download_root ) )

    cache_root = cuppa_env.get_option( 'cache_root', default='~/_cuppa/_cache' )
    cuppa_env['cache_root'] = os.path.normpath( os.path.expanduser( cache_root ) )
    if not os.path.exists( cuppa_env['cache_root'] ):
        os.makedirs( cuppa_env['cache_root'] )

    cuppa_env['default_projects']     = default_projects
    cuppa_env['default_variants']     = default_variants and set( default_variants ) or set()
    cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
    cuppa_env['BUILD_WITH']           = cuppa_env['default_dependencies']
    cuppa_env['dependencies']         = {}
    cuppa_env['default_profiles']     = default_profiles and default_profiles or []
    cuppa_env['BUILD_PROFILE']        = cuppa_env['default_profiles']
    cuppa_env['profiles']             = {}

    test_runner = cuppa_env.get_option( 'runner', default=default_runner and default_runner or 'process' )
    cuppa_env['default_runner'] = test_runner

    cuppa_env['show_test_output'] = cuppa_env.get_option( 'show-test-output' ) and True or False

    self.add_variants  ( cuppa_env )
    self.add_toolchains( cuppa_env )
    self.add_platforms ( cuppa_env )

    cuppa_env['platform'] = cuppa.build_platform.Platform.current()

    toolchains = cuppa_env.get_option( 'toolchains' )
    cuppa_env['target_architectures'] = None

    if not help and not self._configure.handle_conf_only():
        default_toolchain = cuppa_env['platform'].default_toolchain()

        if not toolchains:
            toolchains = [ cuppa_env[self.toolchains_key][default_toolchain] ]
        else:
            toolchains = [ cuppa_env[self.toolchains_key][t] for t in toolchains ]

        cuppa_env['active_toolchains'] = toolchains

    def add_dependency( name, dependency ):
        cuppa_env['dependencies'][name] = dependency

    cuppa.modules.registration.get_options( "methods", cuppa_env )

    if not help and not self._configure.handle_conf_only():
        cuppa_env[self.project_generators_key] = {}
        cuppa.modules.registration.add_to_env( "dependencies",       cuppa_env, add_dependency )
        cuppa.modules.registration.add_to_env( "profiles",           cuppa_env )
        cuppa.modules.registration.add_to_env( "methods",            cuppa_env )
        cuppa.modules.registration.add_to_env( "project_generators", cuppa_env )

        for method_plugin in pkg_resources.iter_entry_points( group='cuppa.method.plugins', name=None ):
            method_plugin.load().add_to_env( cuppa_env )

        if dependencies:
            for name, dependency in dependencies.iteritems():
                dependency.add_to_env( cuppa_env, add_dependency )

    # TODO - default_profile

    if cuppa_env.get_option( 'dump' ):
        cuppa_env.dump()
        SCons.Script.Exit()

    job_count = cuppa_env.get_option( 'num_jobs' )
    parallel  = cuppa_env.get_option( 'parallel' )
    parallel_mode = "manually"

    if job_count == 1 and parallel:
        job_count = multiprocessing.cpu_count()
        if job_count > 1:
            SCons.Script.SetOption( 'num_jobs', job_count )
            parallel_mode = "automatically"

    cuppa_env['job_count'] = job_count
    cuppa_env['parallel']  = parallel

    if job_count > 1:
        logger.debug( "Running in {} with option [{}] set {} as [{}]".format(
            as_emphasised("parallel mode"),
            as_info( "jobs" ),
            as_emphasised(parallel_mode),
            as_info( str( SCons.Script.GetOption( 'num_jobs' ) ) ) ) )

    if not help and self._configure.handle_conf_only():
        self._configure.save()

    if not help and not self._configure.handle_conf_only():
        self.build( cuppa_env )

    if self._configure.handle_conf_only():
        print "cuppa: Handling configuration only, so no builds will be attempted."
        print "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
        print ""
        print "scons -D {}".format( self._command_line_from_settings( cuppa_env['configured_options'] ) )
        print ""
        print "cuppa: Nothing to be done. Exiting."
        SCons.Script.Exit()
def __call__(self, env, target, source, libraries, linktype):

    variant = variant_name(env['variant'].name())
    target_arch = env['target_arch']
    toolchain = env['toolchain']
    stage_dir = stage_directory(toolchain, variant, target_arch, toolchain.abi_flag(env))

    library_action = BoostLibraryAction(env, stage_dir, libraries, self._add_dependents,
                                        linktype, self._boost, self._verbose_build, self._verbose_config)
    library_emitter = BoostLibraryEmitter(env, stage_dir, libraries, self._add_dependents,
                                          linktype, self._boost)

    logger.debug("env = [{}]".format(as_info(env['build_dir'])))

    env.AppendUnique(BUILDERS={
        'BoostLibraryBuilder': env.Builder(action=library_action, emitter=library_emitter)
    })

    bjam_exe = 'bjam'
    if platform.system() == "Windows":
        bjam_exe += ".exe"
    bjam_target = os.path.join(self._boost.local(), bjam_exe)
    bjam = env.Command(bjam_target, [], BuildBjam(self._boost))
    env.NoClean(bjam)

    built_libraries = env.BoostLibraryBuilder(target, source)

    built_library_map = {}
    for library in built_libraries:
        # Extract the library name from the library filename.
        # Possibly use regex instead?
        name = os.path.split(str(library))[1]
        name = name.split(".")[0]
        name = name.split("-")[0]
        name = "_".join(name.split("_")[1:])
        built_library_map[name] = library

    logger.trace("Built Library Map = [{}]".format(colour_items(built_library_map.keys())))

    variant_key = stage_dir

    logger.debug("Source Libraries Variant Key = [{}]".format(as_notice(variant_key)))

    if variant_key not in self._library_sources:
        self._library_sources[variant_key] = {}

    logger.debug("Variant sources = [{}]".format(
        colour_items(self._library_sources[variant_key].keys())))

    required_libraries = add_dependent_libraries(self._boost, linktype, libraries)

    logger.debug("Required libraries = [{}]".format(colour_items(required_libraries)))

    for library in required_libraries:
        if library in self._library_sources[variant_key]:
            logger.debug("Library [{}] already present in variant [{}]".format(
                as_notice(library), as_info(variant_key)))
            if library not in built_library_map:
                logger.debug("Add Depends for [{}]".format(
                    as_notice(self._library_sources[variant_key][library].path)))
                env.Depends(built_libraries, self._library_sources[variant_key][library])
        else:
            self._library_sources[variant_key][library] = built_library_map[library]

    logger.debug("Library sources for variant [{}] = [{}]".format(
        as_info(variant_key),
        colour_items(k + ":" + as_info(v.path)
                     for k, v in self._library_sources[variant_key].iteritems())))

    if built_libraries:

        env.Requires(built_libraries, bjam)

        if cuppa.build_platform.name() == "Linux":

            toolset_target = os.path.join(self._boost.local(), env['toolchain'].name() + "._jam")
            toolset_config_jam = env.Command(toolset_target, [], WriteToolsetConfigJam())

            project_config_target = os.path.join(self._boost.local(), "project-config.jam")
            if not os.path.exists(project_config_target):
                project_config_jam = env.Requires(project_config_target,
                                                  env.AlwaysBuild(toolset_config_jam))
                env.Requires(built_libraries, project_config_jam)

            env.Requires(built_libraries, toolset_config_jam)

    install_dir = env['abs_build_dir']
    if linktype == 'shared':
        install_dir = env['abs_final_dir']

    installed_libraries = []

    for library in required_libraries:

        logger.debug("Install Boost library [{}:{}] to [{}]".format(
            as_notice(library),
            as_info(str(self._library_sources[variant_key][library])),
            as_notice(install_dir)))

        library_node = self._library_sources[variant_key][library]

        logger.trace("Library Node = \n[{}]\n[{}]\n[{}]\n[{}]".format(
            as_notice(library_node.path),
            as_notice(str(library_node)),
            as_notice(str(library_node.get_binfo().bact)),
            as_notice(str(library_node.get_state()))))

        installed_library = env.CopyFiles(install_dir, self._library_sources[variant_key][library])
        installed_libraries.append(installed_library)

    logger.debug("Boost 'Installed' Libraries = [{}]".format(
        colour_items(l.path for l in Flatten(installed_libraries))))

    return Flatten(installed_libraries)
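# A runnable sketch of the filename-to-library-name extraction done inline
# above, e.g. 'libboost_system-gcc-mt-1_66.a' -> 'system':
import os

def extract_library_name(path):
    name = os.path.split(str(path))[1]    # strip any directory component
    name = name.split(".")[0]             # strip the extension(s)
    name = name.split("-")[0]             # strip toolchain/tag decorations
    return "_".join(name.split("_")[1:])  # drop the leading 'libboost'/'boost'

# extract_library_name("stage/lib/libboost_system-gcc-mt-1_66.a") -> 'system'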
def call_project_sconscript_files( self, toolchain, variant, target_arch, sconscript_env, project ):

    sconscript_file = project

    if os.path.exists( sconscript_file ) and os.path.isfile( sconscript_file ):

        logger.debug( "project exists and added to build [{}] using [{},{},{}]".format(
            as_notice( sconscript_file ),
            as_notice( toolchain.name() ),
            as_notice( variant ),
            as_notice( target_arch ) ) )

        path_without_ext = os.path.splitext( sconscript_file )[0]

        sconstruct_offset_path, sconscript_name = os.path.split( sconscript_file )

        name = os.path.splitext( sconscript_name )[0]
        if name.lower() == "sconscript":
            path_without_ext = sconstruct_offset_path
            name = path_without_ext

        sconscript_env['sconscript_file'] = sconscript_file

        build_root = sconscript_env['build_root']

        sconscript_env = sconscript_env.Clone()
        sconscript_env['sconscript_env'] = sconscript_env

        sconscript_env['sconscript_build_dir'] = path_without_ext
        sconscript_env['sconscript_toolchain_build_dir'] = os.path.join( path_without_ext, toolchain.name() )
        sconscript_env['sconscript_dir']   = os.path.join( sconscript_env['base_path'], sconstruct_offset_path )
        sconscript_env['build_dir']        = os.path.normpath( os.path.join( build_root, path_without_ext, toolchain.name(), variant, target_arch, 'working', '' ) )
        sconscript_env['abs_build_dir']    = os.path.abspath( sconscript_env['build_dir'] )
        sconscript_env['offset_dir']       = sconstruct_offset_path
        sconscript_env['final_dir']        = '..' + os.path.sep + 'final' + os.path.sep
        sconscript_env['active_toolchain'] = toolchain

        def abs_final_dir( abs_build_dir, final_dir ):
            return os.path.isabs( final_dir ) and final_dir or os.path.normpath( os.path.join( abs_build_dir, final_dir ) )

        sconscript_env['abs_final_dir'] = abs_final_dir( sconscript_env['abs_build_dir'], sconscript_env['final_dir'] )

        sconscript_env.AppendUnique( INCPATH = [ sconscript_env['offset_dir'] ] )

        sconscript_exports = {
            'env'                     : sconscript_env,
            'sconscript_env'          : sconscript_env,
            'build_root'              : build_root,
            'build_dir'               : sconscript_env['build_dir'],
            'abs_build_dir'           : sconscript_env['abs_build_dir'],
            'final_dir'               : sconscript_env['final_dir'],
            'abs_final_dir'           : sconscript_env['abs_final_dir'],
            'common_variant_final_dir': '../../../common/final/',
            'common_project_final_dir': build_root + '/common/final/',
            'project'                 : name,
        }

        self._configure.configure( sconscript_exports['env'] )

        cuppa.modules.registration.init_env_for_variant( "methods", sconscript_exports )

        SCons.Script.SConscript(
            [ sconscript_file ],
            variant_dir = sconscript_exports['build_dir'],
            duplicate   = 0,
            exports     = sconscript_exports
        )

    else:
        logger.error( "Skipping non-existent project [{}] using [{},{},{}]".format(
            as_error( sconscript_file ),
            as_error( toolchain.name() ),
            as_error( variant ),
            as_error( target_arch ) ) )
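# The nested abs_final_dir helper above resolves final_dir against the build
# directory only when it is relative. A standalone equivalent:
import os

def abs_final_dir_sketch(abs_build_dir, final_dir):
    if os.path.isabs(final_dir):
        return final_dir
    return os.path.normpath(os.path.join(abs_build_dir, final_dir))

# abs_final_dir_sketch('/tmp/build/working', '../final') -> '/tmp/build/final'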