def dump(cls):
    import json

    def expand_node(node):
        if isinstance(node, list):
            return [expand_node(i) for i in node]
        elif isinstance(node, dict):
            return {str(k): expand_node(v) for k, v in node.iteritems()}
        elif isinstance(node, set):
            return [expand_node(s) for s in node]
        elif hasattr(node, "__dict__"):
            return {
                str(k): expand_node(v)
                for k, v in node.__dict__.iteritems()
            }
        else:
            return str(node)

    logger.info(as_info_label("Displaying Options"))
    options = json.dumps(expand_node(cls._options), sort_keys=True, indent=4)
    logger.info("\n" + options + "\n")

    logger.info(as_info_label("Displaying Methods"))
    methods = json.dumps(expand_node(cls._methods), sort_keys=True, indent=4)
    logger.info("\n" + methods + "\n")
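# A minimal, self-contained sketch of the recursion performed by expand_node()
# above: containers are walked recursively, objects are reduced to their
# __dict__, and anything else is stringified so that json.dumps() can render
# it. The _ExampleOption class and the sample data are hypothetical and exist
# only to illustrate the behaviour.
import json

class _ExampleOption(object):
    def __init__(self, name, default):
        self.name = name
        self.default = default

def _expand_for_json(node):
    if isinstance(node, (list, set)):
        return [_expand_for_json(i) for i in node]
    if isinstance(node, dict):
        return dict((str(k), _expand_for_json(v)) for k, v in node.items())
    if hasattr(node, "__dict__"):
        return dict((str(k), _expand_for_json(v)) for k, v in vars(node).items())
    return str(node)

# json.dumps(_expand_for_json({"verbosity": _ExampleOption("verbosity", "info"),
#                              "toolchains": {"gcc", "clang"}}),
#            sort_keys=True, indent=4)
# renders the nested structure as plain JSON text.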
def get_local_directory( self, location, sub_dir, branch_path, full_url ):

    logger.debug( "Determine local directory for [{location}] when {offline}".format(
            location=as_info( location ),
            offline= self._offline and as_info_label("OFFLINE") or "online"
    ) )

    local_directory = None

    base = self._cuppa_env['download_root']
    if not os.path.isabs( base ):
        base = os.path.join( self._cuppa_env['working_dir'], base )

    if location.startswith( 'file:' ):
        location = pip_download.url_to_path( location )

    if not pip_is_url( location ):
        return self.get_local_directory_for_non_url( location, sub_dir, branch_path, base )

    else:
        self._local_folder = self.folder_name_from_path( full_url )
        local_directory = os.path.join( base, self._local_folder )

        if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
            return self.get_local_directory_for_download_url( location, sub_dir, local_directory )

        elif '+' in full_url.scheme:
            return self.get_local_directory_for_repository( location, sub_dir, full_url, local_directory )

        return local_directory
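# A standalone sketch (not cuppa's own helpers) of the dispatch performed
# above: pip-style locations such as "git+https://host/repo.git" carry the
# version-control type in the URL scheme, while a plain "http(s)" scheme is
# treated as a download when the path looks like an archive. The archive
# suffix check and the example URLs below are assumptions for illustration.
try:
    from urllib.parse import urlparse   # Python 3
except ImportError:
    from urlparse import urlparse       # Python 2

def classify_location(location):
    url = urlparse(location)
    if '+' in url.scheme:
        return "repository ({})".format(url.scheme.split('+', 1)[0])
    if url.scheme.startswith('http') and url.path.endswith(('.zip', '.tar.gz', '.tgz')):
        return "download archive"
    return "plain path or URL"

# classify_location("git+https://example.com/org/repo.git") -> "repository (git)"
# classify_location("https://example.com/lib-1.0.tar.gz")   -> "download archive"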
def dump( cls ):

    import json

    def expand_node( node ):
        if isinstance( node, list ):
            return [ expand_node(i) for i in node ]
        elif isinstance( node, dict ):
            return { str(k): expand_node(v) for k,v in node.iteritems() }
        elif isinstance( node, set ):
            return [ expand_node(s) for s in node ]
        elif hasattr( node, "__dict__" ):
            return { str(k): expand_node(v) for k,v in node.__dict__.iteritems() }
        else:
            return str( node )

    logger.info( as_info_label( "Displaying Options" ) )
    options = json.dumps( expand_node(cls._options), sort_keys=True, indent=4 )
    logger.info( "\n" + options + "\n" )

    logger.info( as_info_label( "Displaying Methods" ) )
    methods = json.dumps( expand_node(cls._methods), sort_keys=True, indent=4 )
    logger.info( "\n" + methods + "\n" )
def call_project_sconscript_files(self, toolchain, variant, target_arch, abi,
                                  sconscript_env, project):

    sconscript_file = project

    if os.path.exists(sconscript_file) and os.path.isfile(sconscript_file):

        logger.debug(
            "project exists and added to build [{}] using [{},{},{}]".format(
                as_notice(sconscript_file), as_notice(toolchain.name()),
                as_notice(variant), as_notice(target_arch)))

        path_without_ext = os.path.splitext(sconscript_file)[0]
        sconstruct_offset_path, sconscript_name = os.path.split(
            sconscript_file)

        name = os.path.splitext(sconscript_name)[0]
        sconscript_env['sconscript_name_id'] = name
        if name.lower() == "sconscript":
            sconscript_env['sconscript_name_id'] = ""
            path_without_ext = sconstruct_offset_path
            name = path_without_ext

        sconscript_env['sconscript_file'] = sconscript_file

        build_root = sconscript_env['build_root']
        working_folder = 'working'

        sconscript_env = sconscript_env.Clone()
        sconscript_env['sconscript_env'] = sconscript_env

        sconscript_env['sconscript_build_dir'] = path_without_ext
        sconscript_env['sconscript_toolchain_build_dir'] = os.path.join(
            path_without_ext, toolchain.name())
        sconscript_env['sconscript_dir'] = os.path.join(
            sconscript_env['base_path'], sconstruct_offset_path)
        sconscript_env['abs_sconscript_dir'] = os.path.abspath(
            sconscript_env['sconscript_dir'])
        sconscript_env['tool_variant_dir'] = os.path.join(
            toolchain.name(), variant, target_arch, abi)
        sconscript_env['tool_variant_working_dir'] = os.path.join(
            sconscript_env['tool_variant_dir'], working_folder)

        build_base_path = os.path.join(path_without_ext,
                                       sconscript_env['tool_variant_dir'])

        def flatten_dir(directory, join_char="_"):
            return join_char.join(
                os.path.normpath(directory).split(os.path.sep))

        sconscript_env['build_base_path'] = build_base_path
        sconscript_env['flat_build_base'] = flatten_dir(build_base_path)

        sconscript_env['tool_variant_build_dir'] = os.path.join(
            build_root, sconscript_env['tool_variant_dir'], working_folder)
        sconscript_env['build_dir'] = os.path.normpath(
            os.path.join(build_root, build_base_path, working_folder, ''))
        sconscript_env['abs_build_dir'] = os.path.abspath(
            sconscript_env['build_dir'])
        sconscript_env['build_tool_variant_dir'] = os.path.normpath(
            os.path.join(build_root, sconscript_env['tool_variant_dir'],
                         working_folder, ''))
        sconscript_env['offset_dir'] = sconstruct_offset_path
        sconscript_env['offset_tool_variant_dir'] = os.path.join(
            sconscript_env['offset_dir'], sconscript_env['tool_variant_dir'])
        sconscript_env['tool_variant_dir_offset'] = os.path.normpath(
            os.path.join(sconscript_env['tool_variant_dir'],
                         sconscript_env['offset_dir']))
        sconscript_env['flat_tool_variant_dir_offset'] = os.path.normpath(
            os.path.join(flatten_dir(sconscript_env['tool_variant_dir']),
                         sconscript_env['offset_dir']))
        sconscript_env['final_dir'] = '..' + os.path.sep + 'final' + os.path.sep
        sconscript_env['active_toolchain'] = toolchain

        def abs_final_dir(abs_build_dir, final_dir):
            return os.path.isabs(final_dir) and final_dir or os.path.normpath(
                os.path.join(abs_build_dir, final_dir))

        sconscript_env['abs_final_dir'] = abs_final_dir(
            sconscript_env['abs_build_dir'], sconscript_env['final_dir'])

        sconscript_env.AppendUnique(INCPATH=[sconscript_env['offset_dir']])

        sconscript_exports = {
            'env': sconscript_env,
            'sconscript_env': sconscript_env,
            'build_root': build_root,
            'build_dir': sconscript_env['build_dir'],
            'abs_build_dir': sconscript_env['abs_build_dir'],
            'final_dir': sconscript_env['final_dir'],
            'abs_final_dir': sconscript_env['abs_final_dir'],
            'common_variant_final_dir': '../../../common/final/',
            'common_project_final_dir': build_root + '/common/final/',
            'project': name,
        }

        self._configure.configure(sconscript_exports['env'])

        cuppa.modules.registration.init_env_for_variant(
            "methods", sconscript_exports)

        if sconscript_env['dump']:
            logger.info("{} {}".format(
                as_info_label("Dumping ENV for"),
                as_info(sconscript_exports['build_dir'])))
            dump = sconscript_env.Dump()
            logger.info("\n" + dump + "\n")
        else:
            SCons.Script.SConscript(
                [sconscript_file],
                variant_dir=sconscript_exports['build_dir'],
                duplicate=0,
                exports=sconscript_exports)
    else:
        logger.error(
            "Skipping non-existent project [{}] using [{},{},{}]".format(
                as_error(sconscript_file), as_error(toolchain.name()),
                as_error(variant), as_error(target_arch)))
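# A small standalone sketch of the two path helpers defined above.
# flatten_dir() collapses a directory path into a single flat name, and
# abs_final_dir() resolves a relative final_dir such as "../final/" against
# the absolute build directory. The example paths are invented (POSIX-style)
# and serve only to show the expected results.
import os

def flatten_dir(directory, join_char="_"):
    return join_char.join(os.path.normpath(directory).split(os.path.sep))

def abs_final_dir(abs_build_dir, final_dir):
    if os.path.isabs(final_dir):
        return final_dir
    return os.path.normpath(os.path.join(abs_build_dir, final_dir))

# flatten_dir("gcc/release/x86_64")                -> "gcc_release_x86_64"
# abs_final_dir("/tmp/build/working", "../final/") -> "/tmp/build/final"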
def __init__(self,
             sconstruct_path,
             base_path=os.path.abspath('.'),
             branch_root=None,
             default_options={},
             default_projects=[],
             default_variants=[],
             default_dependencies=[],
             default_profiles=[],
             dependencies=[],
             profiles=[],
             default_runner=None,
             configure_callback=None,
             tools=[]):

    cuppa.core.base_options.set_base_options()

    cuppa_env = cuppa.core.environment.CuppaEnvironment()
    cuppa_env.add_tools(tools)

    dependencies, default_dependencies, dependencies_warning = self._normalise_with_defaults(
        dependencies, default_dependencies, "dependencies")
    profiles, default_profiles, profiles_warning = self._normalise_with_defaults(
        profiles, default_profiles, "profiles")

    self.initialise_options(cuppa_env, default_options, profiles, dependencies)
    cuppa_env['configured_options'] = {}
    self._configure = cuppa.configure.Configure(
        cuppa_env, callback=configure_callback)

    enable_thirdparty_logging(
        cuppa_env.get_option('enable-thirdparty-logging') and True or False)

    self._set_verbosity_level(cuppa_env)

    cuppa_env['sconstruct_path'] = sconstruct_path
    cuppa_env['sconstruct_dir'], cuppa_env['sconstruct_file'] = os.path.split(
        sconstruct_path)

    self._set_output_format(cuppa_env)

    self._configure.load()

    cuppa_env['offline'] = cuppa_env.get_option('offline')

    cuppa.version.check_current_version(cuppa_env['offline'])

    if cuppa_env['offline']:
        logger.info(as_info_label("Running in OFFLINE mode"))

    logger.info("using sconstruct file [{}]".format(
        as_notice(cuppa_env['sconstruct_file'])))

    if dependencies_warning:
        logger.warn(dependencies_warning)

    if profiles_warning:
        logger.warn(profiles_warning)

    help = cuppa_env.get_option('help') and True or False

    cuppa_env['minimal_output'] = cuppa_env.get_option('minimal_output')
    cuppa_env['ignore_duplicates'] = cuppa_env.get_option('ignore_duplicates')

    cuppa_env['working_dir'] = os.getcwd()
    cuppa_env['launch_dir'] = os.path.relpath(SCons.Script.GetLaunchDir(),
                                              cuppa_env['working_dir'])
    cuppa_env['run_from_launch_dir'] = cuppa_env['launch_dir'] == "."

    cuppa_env['launch_offset_dir'] = "."

    if not cuppa_env['run_from_launch_dir']:
        levels = len(cuppa_env['launch_dir'].split(os.path.sep))
        cuppa_env['launch_offset_dir'] = os.path.sep.join(
            ['..' for i in range(levels)])

    cuppa_env['base_path'] = os.path.normpath(os.path.expanduser(base_path))
    cuppa_env['branch_root'] = branch_root and os.path.normpath(
        os.path.expanduser(branch_root)) or base_path
    cuppa_env['branch_dir'] = cuppa_env['branch_root'] and os.path.relpath(
        cuppa_env['base_path'], cuppa_env['branch_root']) or None

    thirdparty = cuppa_env.get_option('thirdparty')
    if thirdparty:
        thirdparty = os.path.normpath(os.path.expanduser(thirdparty))

    cuppa_env['thirdparty'] = thirdparty

    cuppa.core.storage_options.process_storage_options(cuppa_env)
    cuppa.core.location_options.process_location_options(cuppa_env)

    cuppa_env['current_branch'] = ''
    cuppa_env['current_revision'] = ''

    if not help and not self._configure.handle_conf_only():
        if cuppa_env['location_match_current_branch']:
            url, repo, branch, remote, rev = cuppa.scms.scms.get_current_rev_info(
                cuppa_env['sconstruct_dir'])
            if branch:
                cuppa_env['current_branch'] = branch
            if rev:
                cuppa_env['current_revision'] = rev
            logger.info(
                "Current build on branch [{}] at revision [{}] from remote [{}] in [{}] at [{}]"
                .format(as_info(str(branch)), as_info(str(rev)),
                        as_info(str(remote)), as_info(str(repo)),
                        as_info(str(url))))

    cuppa_env['default_projects'] = default_projects
    cuppa_env['default_variants'] = default_variants and set(
        default_variants) or set()
    cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
    cuppa_env['BUILD_WITH'] = cuppa_env['default_dependencies']
    cuppa_env['dependencies'] = {}
    cuppa_env['default_profiles'] = default_profiles and default_profiles or []
    cuppa_env['BUILD_PROFILE'] = cuppa_env['default_profiles']
    cuppa_env['profiles'] = {}

    test_runner = cuppa_env.get_option(
        'runner', default=default_runner and default_runner or 'process')
    cuppa_env['default_runner'] = test_runner

    cuppa_env['propagate_env'] = cuppa_env.get_option('propagate-env') and True or False
    cuppa_env['propagate_path'] = cuppa_env.get_option('propagate-path') and True or False
    cuppa_env['merge_path'] = cuppa_env.get_option('merge-path') and True or False
    cuppa_env['show_test_output'] = cuppa_env.get_option('show-test-output') and True or False
    cuppa_env['suppress_process_output'] = cuppa_env.get_option('suppress-process-output') and True or False
    cuppa_env['dump'] = cuppa_env.get_option('dump') and True or False
    cuppa_env['clean'] = cuppa_env.get_option('clean') and True or False

    self.add_variants(cuppa_env)
    self.add_toolchains(cuppa_env)
    self.add_platforms(cuppa_env)

    cuppa_env['platform'] = cuppa.build_platform.Platform.current()

    toolchains = cuppa_env.get_option('toolchains')
    cuppa_env['target_architectures'] = None

    if not help and not self._configure.handle_conf_only():
        default_toolchain = cuppa_env['platform'].default_toolchain()

        if not toolchains:
            toolchains = [cuppa_env[self.toolchains_key][default_toolchain]]
        else:
            toolchains = [cuppa_env[self.toolchains_key][t] for t in toolchains]

        cuppa_env['active_toolchains'] = toolchains

        def add_profile(name, profile):
            cuppa_env['profiles'][name] = profile

        def add_dependency(name, dependency):
            cuppa_env['dependencies'][name] = dependency

        cuppa.modules.registration.get_options("methods", cuppa_env)

        if not help and not self._configure.handle_conf_only():
            cuppa_env[self.project_generators_key] = {}
            cuppa.modules.registration.add_to_env("dependencies", cuppa_env,
                                                  add_dependency)
            cuppa.modules.registration.add_to_env("profiles", cuppa_env,
                                                  add_profile)
            cuppa.modules.registration.add_to_env("methods", cuppa_env)
            cuppa.modules.registration.add_to_env("project_generators",
                                                  cuppa_env)

            for method_plugin in pkg_resources.iter_entry_points(
                    group='cuppa.method.plugins', name=None):
                method_plugin.load().add_to_env(cuppa_env)

            for profile_plugin in pkg_resources.iter_entry_points(
                    group='cuppa.profile.plugins', name=None):
                profile_plugin.load().add_to_env(cuppa_env)

            if profiles:
                for profile in profiles:
                    profile.add_to_env(cuppa_env, add_profile)

            logger.trace("available profiles are [{}]".format(
                colour_items(sorted(cuppa_env["profiles"].keys()))))

            logger.info("default profiles are [{}]".format(
                colour_items(sorted(cuppa_env["default_profiles"]), as_info)))

            for dependency_plugin in pkg_resources.iter_entry_points(
                    group='cuppa.dependency.plugins', name=None):
                dependency_plugin.load().add_to_env(cuppa_env, add_dependency)

            if dependencies:
                for dependency in dependencies:
                    dependency.add_to_env(cuppa_env, add_dependency)

            logger.trace("available dependencies are [{}]".format(
                colour_items(sorted(cuppa_env["dependencies"].keys()))))

            logger.info("default dependencies are [{}]".format(
                colour_items(sorted(cuppa_env["default_dependencies"]),
                             as_info)))

        # TODO - default_profile

        if cuppa_env['dump']:
            logger.info(
                as_info_label(
                    "Running in DUMP mode, no building will be attempted"))
            cuppa_env.dump()

        job_count = cuppa_env.get_option('num_jobs')
        parallel = cuppa_env.get_option('parallel')
        parallel_mode = "manually"

        if job_count == 1 and parallel:
            job_count = multiprocessing.cpu_count()
            if job_count > 1:
                SCons.Script.SetOption('num_jobs', job_count)
                parallel_mode = "automatically"

        cuppa_env['job_count'] = job_count
        cuppa_env['parallel'] = parallel

        if job_count > 1:
            logger.info(
                "Running in {} with option [{}] set {} as [{}]".format(
                    as_emphasised("parallel mode"), as_info("jobs"),
                    as_emphasised(parallel_mode),
                    as_info(str(SCons.Script.GetOption('num_jobs')))))

    if not help and self._configure.handle_conf_only():
        self._configure.save()

    if not help and not self._configure.handle_conf_only():
        self.build(cuppa_env)

    if self._configure.handle_conf_only():
        print(
            "cuppa: Handling configuration only, so no builds will be attempted."
        )
        print(
            "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
        )
        print("")
        print("scons -D {}".format(
            self._command_line_from_settings(
                cuppa_env['configured_options'])))
        print("")
        print("cuppa: Nothing to be done. Exiting.")
        SCons.Script.Exit()
def call_project_sconscript_files( self, toolchain, variant, target_arch, abi, sconscript_env, project ):

    sconscript_file = project

    if os.path.exists( sconscript_file ) and os.path.isfile( sconscript_file ):

        logger.debug( "project exists and added to build [{}] using [{},{},{}]".format(
                as_notice( sconscript_file ),
                as_notice( toolchain.name() ),
                as_notice( variant ),
                as_notice( target_arch )
        ) )

        path_without_ext = os.path.splitext( sconscript_file )[0]
        sconstruct_offset_path, sconscript_name = os.path.split( sconscript_file )

        name = os.path.splitext( sconscript_name )[0]
        sconscript_env['sconscript_name_id'] = name
        if name.lower() == "sconscript":
            sconscript_env['sconscript_name_id'] = ""
            path_without_ext = sconstruct_offset_path
            name = path_without_ext

        sconscript_env['sconscript_file'] = sconscript_file

        build_root = sconscript_env['build_root']
        working_folder = 'working'

        sconscript_env = sconscript_env.Clone()
        sconscript_env['sconscript_env'] = sconscript_env

        sconscript_env['sconscript_build_dir'] = path_without_ext
        sconscript_env['sconscript_toolchain_build_dir'] = os.path.join( path_without_ext, toolchain.name() )
        sconscript_env['sconscript_dir'] = os.path.join( sconscript_env['base_path'], sconstruct_offset_path )
        sconscript_env['abs_sconscript_dir'] = os.path.abspath( sconscript_env['sconscript_dir'] )
        sconscript_env['tool_variant_dir'] = os.path.join( toolchain.name(), variant, target_arch, abi )
        sconscript_env['tool_variant_working_dir'] = os.path.join( sconscript_env['tool_variant_dir'], working_folder )

        build_base_path = os.path.join( path_without_ext, sconscript_env['tool_variant_dir'] )

        def flatten_dir( directory, join_char="_" ):
            return join_char.join( os.path.normpath( directory ).split( os.path.sep ) )

        sconscript_env['build_base_path'] = build_base_path
        sconscript_env['flat_build_base'] = flatten_dir( build_base_path )

        sconscript_env['tool_variant_build_dir'] = os.path.join( build_root, sconscript_env['tool_variant_dir'], working_folder )
        sconscript_env['build_dir'] = os.path.normpath( os.path.join( build_root, build_base_path, working_folder, '' ) )
        sconscript_env['abs_build_dir'] = os.path.abspath( sconscript_env['build_dir'] )
        sconscript_env['build_tool_variant_dir'] = os.path.normpath( os.path.join( build_root, sconscript_env['tool_variant_dir'], working_folder, '' ) )
        sconscript_env['offset_dir'] = sconstruct_offset_path
        sconscript_env['offset_tool_variant_dir'] = os.path.join( sconscript_env['offset_dir'], sconscript_env['tool_variant_dir'] )
        sconscript_env['tool_variant_dir_offset'] = os.path.normpath( os.path.join( sconscript_env['tool_variant_dir'], sconscript_env['offset_dir'] ) )
        sconscript_env['flat_tool_variant_dir_offset'] = os.path.normpath( os.path.join( flatten_dir( sconscript_env['tool_variant_dir'] ), sconscript_env['offset_dir'] ) )
        sconscript_env['final_dir'] = '..' + os.path.sep + 'final' + os.path.sep
        sconscript_env['active_toolchain'] = toolchain

        def abs_final_dir( abs_build_dir, final_dir ):
            return os.path.isabs( final_dir ) and final_dir or os.path.normpath( os.path.join( abs_build_dir, final_dir ) )

        sconscript_env['abs_final_dir'] = abs_final_dir( sconscript_env['abs_build_dir'], sconscript_env['final_dir'] )

        sconscript_env.AppendUnique( INCPATH = [ sconscript_env['offset_dir'] ] )

        sconscript_exports = {
            'env'                     : sconscript_env,
            'sconscript_env'          : sconscript_env,
            'build_root'              : build_root,
            'build_dir'               : sconscript_env['build_dir'],
            'abs_build_dir'           : sconscript_env['abs_build_dir'],
            'final_dir'               : sconscript_env['final_dir'],
            'abs_final_dir'           : sconscript_env['abs_final_dir'],
            'common_variant_final_dir': '../../../common/final/',
            'common_project_final_dir': build_root + '/common/final/',
            'project'                 : name,
        }

        self._configure.configure( sconscript_exports['env'] )

        cuppa.modules.registration.init_env_for_variant( "methods", sconscript_exports )

        if sconscript_env['dump']:
            logger.info( "{} {}".format( as_info_label( "Dumping ENV for" ), as_info( sconscript_exports['build_dir'] ) ) )
            dump = sconscript_env.Dump()
            logger.info( "\n" + dump + "\n" )
        else:
            SCons.Script.SConscript(
                    [ sconscript_file ],
                    variant_dir = sconscript_exports['build_dir'],
                    duplicate   = 0,
                    exports     = sconscript_exports
            )
    else:
        logger.error( "Skipping non-existent project [{}] using [{},{},{}]".format(
                as_error( sconscript_file ),
                as_error( toolchain.name() ),
                as_error( variant ),
                as_error( target_arch )
        ) )
def __init__( self,
              sconstruct_path,
              base_path            = os.path.abspath( '.' ),
              branch_root          = None,
              default_options      = {},
              default_projects     = [],
              default_variants     = [],
              default_dependencies = [],
              default_profiles     = [],
              dependencies         = [],
              profiles             = [],
              default_runner       = None,
              configure_callback   = None,
              tools                = [] ):

    cuppa.core.base_options.set_base_options()

    cuppa_env = cuppa.core.environment.CuppaEnvironment()
    cuppa_env.add_tools( tools )

    dependencies, default_dependencies, dependencies_warning = self._normalise_with_defaults( dependencies, default_dependencies, "dependencies" )
    profiles, default_profiles, profiles_warning = self._normalise_with_defaults( profiles, default_profiles, "profiles" )

    self.initialise_options( cuppa_env, default_options, profiles, dependencies )
    cuppa_env['configured_options'] = {}
    self._configure = cuppa.configure.Configure( cuppa_env, callback=configure_callback )

    enable_thirdparty_logging( cuppa_env.get_option( 'enable-thirdparty-logging' ) and True or False )

    self._set_verbosity_level( cuppa_env )

    cuppa_env['sconstruct_path'] = sconstruct_path
    cuppa_env['sconstruct_dir'], cuppa_env['sconstruct_file'] = os.path.split( sconstruct_path )

    self._set_output_format( cuppa_env )

    self._configure.load()

    cuppa_env['offline'] = cuppa_env.get_option( 'offline' )

    cuppa.version.check_current_version( cuppa_env['offline'] )

    if cuppa_env['offline']:
        logger.info( as_info_label( "Running in OFFLINE mode" ) )

    logger.info( "using sconstruct file [{}]".format( as_notice( cuppa_env['sconstruct_file'] ) ) )

    if dependencies_warning:
        logger.warn( dependencies_warning )

    if profiles_warning:
        logger.warn( profiles_warning )

    help = cuppa_env.get_option( 'help' ) and True or False

    cuppa_env['minimal_output']    = cuppa_env.get_option( 'minimal_output' )
    cuppa_env['ignore_duplicates'] = cuppa_env.get_option( 'ignore_duplicates' )

    cuppa_env['working_dir']         = os.getcwd()
    cuppa_env['launch_dir']          = os.path.relpath( SCons.Script.GetLaunchDir(), cuppa_env['working_dir'] )
    cuppa_env['run_from_launch_dir'] = cuppa_env['launch_dir'] == "."

    cuppa_env['launch_offset_dir'] = "."

    if not cuppa_env['run_from_launch_dir']:
        levels = len( cuppa_env['launch_dir'].split( os.path.sep ) )
        cuppa_env['launch_offset_dir'] = os.path.sep.join( ['..' for i in range(levels)] )

    cuppa_env['base_path']   = os.path.normpath( os.path.expanduser( base_path ) )
    cuppa_env['branch_root'] = branch_root and os.path.normpath( os.path.expanduser( branch_root ) ) or base_path
    cuppa_env['branch_dir']  = cuppa_env['branch_root'] and os.path.relpath( cuppa_env['base_path'], cuppa_env['branch_root'] ) or None

    thirdparty = cuppa_env.get_option( 'thirdparty' )
    if thirdparty:
        thirdparty = os.path.normpath( os.path.expanduser( thirdparty ) )

    cuppa_env['thirdparty'] = thirdparty

    cuppa.core.storage_options.process_storage_options( cuppa_env )
    cuppa.core.location_options.process_location_options( cuppa_env )

    cuppa_env['default_projects']     = default_projects
    cuppa_env['default_variants']     = default_variants and set( default_variants ) or set()
    cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
    cuppa_env['BUILD_WITH']           = cuppa_env['default_dependencies']
    cuppa_env['dependencies']         = {}
    cuppa_env['default_profiles']     = default_profiles and default_profiles or []
    cuppa_env['BUILD_PROFILE']        = cuppa_env['default_profiles']
    cuppa_env['profiles']             = {}

    test_runner = cuppa_env.get_option( 'runner', default=default_runner and default_runner or 'process' )
    cuppa_env['default_runner'] = test_runner

    cuppa_env['propagate_env']           = cuppa_env.get_option( 'propagate-env' )           and True or False
    cuppa_env['propagate_path']          = cuppa_env.get_option( 'propagate-path' )          and True or False
    cuppa_env['merge_path']              = cuppa_env.get_option( 'merge-path' )              and True or False
    cuppa_env['show_test_output']        = cuppa_env.get_option( 'show-test-output' )        and True or False
    cuppa_env['suppress_process_output'] = cuppa_env.get_option( 'suppress-process-output' ) and True or False
    cuppa_env['dump']                    = cuppa_env.get_option( 'dump' )                    and True or False
    cuppa_env['clean']                   = cuppa_env.get_option( 'clean' )                   and True or False

    self.add_variants  ( cuppa_env )
    self.add_toolchains( cuppa_env )
    self.add_platforms ( cuppa_env )

    cuppa_env['platform'] = cuppa.build_platform.Platform.current()

    toolchains = cuppa_env.get_option( 'toolchains' )
    cuppa_env[ 'target_architectures' ] = None

    if not help and not self._configure.handle_conf_only():
        default_toolchain = cuppa_env['platform'].default_toolchain()

        if not toolchains:
            toolchains = [ cuppa_env[self.toolchains_key][default_toolchain] ]
        else:
            toolchains = [ cuppa_env[self.toolchains_key][t] for t in toolchains ]

        cuppa_env['active_toolchains'] = toolchains

        def add_profile( name, profile ):
            cuppa_env['profiles'][name] = profile

        def add_dependency( name, dependency ):
            cuppa_env['dependencies'][name] = dependency

        cuppa.modules.registration.get_options( "methods", cuppa_env )

        if not help and not self._configure.handle_conf_only():

            cuppa_env[self.project_generators_key] = {}
            cuppa.modules.registration.add_to_env( "dependencies",       cuppa_env, add_dependency )
            cuppa.modules.registration.add_to_env( "profiles",           cuppa_env, add_profile )
            cuppa.modules.registration.add_to_env( "methods",            cuppa_env )
            cuppa.modules.registration.add_to_env( "project_generators", cuppa_env )

            for method_plugin in pkg_resources.iter_entry_points( group='cuppa.method.plugins', name=None ):
                method_plugin.load().add_to_env( cuppa_env )

            for profile_plugin in pkg_resources.iter_entry_points( group='cuppa.profile.plugins', name=None ):
                profile_plugin.load().add_to_env( cuppa_env )

            if profiles:
                for profile in profiles:
                    profile.add_to_env( cuppa_env, add_profile )

            logger.trace( "available profiles are [{}]".format( colour_items( sorted( cuppa_env["profiles"].keys() ) ) ) )

            logger.info( "default profiles are [{}]".format( colour_items( sorted( cuppa_env["default_profiles"] ), as_info ) ) )

            for dependency_plugin in pkg_resources.iter_entry_points( group='cuppa.dependency.plugins', name=None ):
                dependency_plugin.load().add_to_env( cuppa_env, add_dependency )

            if dependencies:
                for dependency in dependencies:
                    dependency.add_to_env( cuppa_env, add_dependency )

            logger.trace( "available dependencies are [{}]".format( colour_items( sorted( cuppa_env["dependencies"].keys() ) ) ) )

            logger.info( "default dependencies are [{}]".format( colour_items( sorted( cuppa_env["default_dependencies"] ), as_info ) ) )

        # TODO - default_profile

        if cuppa_env['dump']:
            logger.info( as_info_label( "Running in DUMP mode, no building will be attempted" ) )
            cuppa_env.dump()

        job_count = cuppa_env.get_option( 'num_jobs' )
        parallel  = cuppa_env.get_option( 'parallel' )
        parallel_mode = "manually"

        if job_count == 1 and parallel:
            job_count = multiprocessing.cpu_count()
            if job_count > 1:
                SCons.Script.SetOption( 'num_jobs', job_count )
                parallel_mode = "automatically"

        cuppa_env['job_count'] = job_count
        cuppa_env['parallel']  = parallel

        if job_count > 1:
            logger.debug( "Running in {} with option [{}] set {} as [{}]".format(
                    as_emphasised( "parallel mode" ),
                    as_info( "jobs" ),
                    as_emphasised( parallel_mode ),
                    as_info( str( SCons.Script.GetOption( 'num_jobs' ) ) )
            ) )

    if not help and self._configure.handle_conf_only():
        self._configure.save()

    if not help and not self._configure.handle_conf_only():
        self.build( cuppa_env )

    if self._configure.handle_conf_only():
        print "cuppa: Handling configuration only, so no builds will be attempted."
        print "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
        print ""
        print "scons -D {}".format( self._command_line_from_settings( cuppa_env['configured_options'] ) )
        print ""
        print "cuppa: Nothing to be done. Exiting."
        SCons.Script.Exit()
def __init__( self, cuppa_env, location, develop=None, branch_path=None, extra_sub_path=None, name_hint=None ):

    logger.debug( "Create location using location=[{}], develop=[{}], branch_path=[{}], extra_sub_path=[{}], name_hint=[{}]".format(
            as_info( location ),
            as_info( str(develop) ),
            as_info( str(branch_path) ),
            as_info( str(extra_sub_path) ),
            as_info( str(name_hint) )
    ) )

    self._cuppa_env = cuppa_env
    self._supports_relative_versioning = False
    self._current_branch   = self._cuppa_env['current_branch']
    self._current_revision = self._cuppa_env['current_revision']
    self._offline = self.option_set('offline')
    offline = self._offline
    self._default_branch = self._cuppa_env['location_default_branch']

    location = self.replace_sconstruct_anchor( location )

    if develop:
        if not os.path.isabs( develop ):
            develop = '#' + develop
        develop = self.replace_sconstruct_anchor( develop )
        logger.debug( "Develop location specified [{}]".format( as_info( develop ) ) )

    if self.option_set('develop') and develop:
        location = develop
        logger.debug( "--develop specified so using location=develop=[{}]".format( as_info( develop ) ) )

    scm_location = location

    if location[-1] == '@':
        self._supports_relative_versioning = True
        scm_location = location[:-1]

    scm_system, vc_type, repo_location, versioning = self.get_scm_system_and_info( self.expand_secret( scm_location ) )

    logger.debug( "Local location and actions for [{location}] being determined in context:{offline}"
                  " vc_type=[{vc_type}], repo_location=[{repo_location}],"
                  " versioning=[{versioning}]".format(
            location      = as_info( location ),
            offline       = self._offline and " " + as_info_label("OFFLINE") + "," or "",
            vc_type       = as_info( str(vc_type) ),
            repo_location = as_info( str(repo_location) ),
            versioning    = as_info( str(versioning) )
    ) )

    if self._supports_relative_versioning:
        if self.location_match_current_branch():
            if not scm_system:
                logger.warn( "Location [{}] specified using relative versioning, but no SCM system is available"
                             " that matches the version control type [{}]. Relative versioning will be ignored"
                             " for this location.".format( location, vc_type ) )
            else:
                branch_exists = False

                logger.debug( "Relative branching active for [{location}] with"
                              " current branch [{branch}] and current revision [{revision}]".format(
                        location = as_info( str(location) ),
                        branch   = as_info( str(self._current_branch) ),
                        revision = as_info( str(self._current_revision) )
                ) )

                if self._current_branch:
                    # Try to checkout on the explicit branch but if that fails fall back
                    # to the default by stripping off the '@' from the end of the path
                    if not offline and scm_system.remote_branch_exists( repo_location, self._current_branch ):
                        scm_location = location + self._current_branch
                        logger.trace( "scm_location = [{scm_location}]".format( scm_location=as_info( str(scm_location) ) ) )
                elif self._current_revision:
                    # Try to checkout at the explicit revision but if that fails fall back
                    # to the default by stripping off the '@' from the end of the path
                    if not offline and scm_system.remote_branch_exists( repo_location, self._current_revision ):
                        scm_location = location + self._current_revision
                        logger.trace( "scm_location = [{scm_location}]".format( scm_location=as_info( str(scm_location) ) ) )
        elif scm_system and not offline:
            self._default_branch = scm_system.remote_default_branch( repo_location )
            if self._default_branch:
                scm_location = location + self._default_branch
    elif( scm_system
          and not versioning
          and not offline
          and self.option_set('location_explicit_default_branch') ):
        self._default_branch = scm_system.remote_default_branch( repo_location )
        if self._default_branch:
            scm_location = location + '@' + self._default_branch

    location = scm_location

    self._location  = os.path.expanduser( location )
    self._full_url  = urlparse( self._location )
    self._sub_dir   = None
    self._name_hint = name_hint

    if extra_sub_path:
        if os.path.isabs( extra_sub_path ):
            raise LocationException( "Error extra sub path [{}] is not relative".format( extra_sub_path ) )
        else:
            self._sub_dir = os.path.normpath( extra_sub_path )

    ## Get the location for the source dependency. If the location is a URL or an Archive we'll need to
    ## retrieve the URL and extract the archive. get_local_directory() returns the location of the source
    ## once this is done.
    local_directory = self.get_local_directory( self._location, self._sub_dir, branch_path, self._full_url )

    logger.trace( "Local Directory for [{}] returned as [{}]".format(
            as_notice( self._location ),
            as_notice( local_directory )
    ) )

    self._base_local_directory = local_directory
    self._local_directory = self._sub_dir and os.path.join( local_directory, self._sub_dir ) or local_directory

    ## Now that we have a locally accessible version of the dependency we can try to collate some information
    ## about it to allow us to specify what we are building with.
    self._url, self._repository, self._branch, self._remote, self._revision = self.get_info( self._location, self._local_directory, self._full_url )
    self._version, self._revision = self.ver_rev_summary( self._branch, self._revision, self._full_url.path )

    logger.debug( "Using [{}]{}{} at [{}] stored in [{}]".format(
            as_info( location ),
            ( self._branch and ":[{}]".format( as_info( str(self._branch) ) ) or "" ),
            ( self._remote and " from [{}]".format( as_info( str(self._remote) ) ) or "" ),
            as_info( self._version ),
            as_notice( self._local_directory )
    ) )
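# A standalone sketch (invented helper name, not cuppa's API) of the
# relative-versioning rule applied above: a location ending in '@' means
# "follow the branch the build itself is on", so the current branch or
# revision is appended to the location when one is known, otherwise the
# trailing '@' is simply dropped.
def resolve_relative_version( location, current_branch=None, current_revision=None ):
    if not location.endswith( '@' ):
        return location
    label = current_branch or current_revision
    if label:
        return location + label
    return location[:-1]

# resolve_relative_version( "git+https://example.com/repo.git@", "feature/x" )
#     -> "git+https://example.com/repo.git@feature/x"
# resolve_relative_version( "git+https://example.com/repo.git@" )
#     -> "git+https://example.com/repo.git"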
def get_local_directory_for_repository( self, location, sub_dir, full_url, local_directory ):

    vc_type = location.split('+', 1)[0]
    backend = pip_vcs.vcs.get_backend( vc_type )
    if not backend:
        logger.error( "URL VC of [{}] for [{}] NOT recognised so location cannot be retrieved".format(
                as_error( vc_type ),
                as_error( location )
        ) )
        raise LocationException( "URL VC of [{}] for [{}] NOT recognised so location cannot be retrieved".format( vc_type, location ) )

    if self._cuppa_env['dump'] or self._cuppa_env['clean']:
        return local_directory

    local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

    if not self._offline:
        try:
            vcs_backend = backend( self.expand_secret( location ) )
        except: # Pip version >= 19
            backend.url = self.expand_secret( location )
            vcs_backend = backend

        if os.path.exists( local_directory ):
            self.update_from_repository( location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend )
        else:
            self.obtain_from_repository( location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend )

        logger.debug( "(url path) Location     = [{}]".format( as_info( location ) ) )
        logger.debug( "(url path) Local folder = [{}]".format( as_info( self._local_folder ) ) )
    else:
        branched_local_directory = None

        if self.location_match_current_branch():
            # If relative versioning is in play and we are offline check first to see
            # if the specified branch or tag is available and prefer that one
            if self._supports_relative_versioning and self._current_branch:
                branched_local_directory = local_directory + "@" + self._current_branch
                if os.path.exists( branched_local_directory ):
                    return branched_local_directory
            elif self._supports_relative_versioning and self._current_revision:
                branched_local_directory = local_directory + "@" + self._current_revision
                if os.path.exists( branched_local_directory ):
                    return branched_local_directory
            elif self._supports_relative_versioning and self._default_branch:
                branched_local_directory = local_directory + "@" + self._default_branch
                if os.path.exists( branched_local_directory ):
                    return branched_local_directory

        # If the preferred branch is not available then fallback to the
        # default of no branch being specified
        if os.path.exists( local_directory ):
            return local_directory
        else:
            if self.location_match_current_branch():
                logger.error( "Running in {offline} mode and neither [{local_dir}] or a branched dir"
                              " [{branched_dir}] exists so location cannot be retrieved".format(
                        offline      = as_info_label("OFFLINE"),
                        local_dir    = as_error( local_directory ),
                        branched_dir = as_error( str(branched_local_directory) )
                ) )
                raise LocationException( "Running in {offline} mode and neither [{local_dir}] or a branched dir"
                                         " [{branched_dir}] exists so location cannot be retrieved".format(
                        offline      = "OFFLINE",
                        local_dir    = local_directory,
                        branched_dir = str(branched_local_directory)
                ) )
            else:
                logger.error( "Running in {offline} mode and [{local_dir}] does not exist"
                              " so location cannot be retrieved".format(
                        offline   = as_info_label("OFFLINE"),
                        local_dir = as_error( local_directory )
                ) )
                raise LocationException( "Running in {offline} mode and [{local_dir}] does not exist"
                                         " so location cannot be retrieved".format(
                        offline   = "OFFLINE",
                        local_dir = local_directory
                ) )

    return local_directory
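# A minimal sketch (hypothetical helper, POSIX-style paths) of the offline
# fallback order implemented above: prefer a checkout suffixed with the
# current branch, then the current revision, then the default branch, and
# finally the plain unbranched directory if none of those exist on disk.
import os

def pick_offline_directory( local_directory, current_branch=None,
                            current_revision=None, default_branch=None ):
    for label in ( current_branch, current_revision, default_branch ):
        if label:
            candidate = local_directory + "@" + label
            if os.path.exists( candidate ):
                return candidate
            break  # mirror the elif chain above: only the first available label is tried
    if os.path.exists( local_directory ):
        return local_directory
    raise RuntimeError( "offline and no usable checkout for " + local_directory )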