def update_process(my_workspace_path, my_project_scope):
    """Bootstrap the build environment and update its external dependencies.

    Positional arguments:
    my_workspace_path -- path to the workspace root
    my_project_scope -- scope strings used to select environment descriptors
    """
    # Get the environment set up.
    # Fix: corrected typo in the section message ("Enviroment" -> "Environment").
    logging.log(MuLogging.SECTION, "Bootstrapping Environment")
    logging.info("## Parsing environment...")
    (build_env, shell_env) = minimum_env_init(my_workspace_path, my_project_scope)
    logging.info("Done.\n")

    # Update the environment.
    logging.info("## Updating environment...")
    SelfDescribingEnvironment.UpdateDependencies(my_workspace_path, my_project_scope)
    logging.info("Done.\n")
def minimum_env_init(my_workspace_path, my_project_scope):
    """Verify minimum tool versions and bootstrap the build environment.

    Reports the Python and Git versions to the version aggregator, raises
    RuntimeError when a hard minimum is not met, and returns the
    (build_env, shell_env) pair from BootstrapEnvironment.
    """
    # TODO: Check the Git version against minimums.
    # Check the Python version against minimums.
    python_version = "%d.%d.%d" % sys.version_info[:3]
    VersionAggregator.GetVersionAggregator().ReportVersion(
        "Python", python_version, VersionAggregator.VersionTypes.TOOL)

    recommended_python = "3.7"
    required_python = "3.6"
    if version_compare(required_python, python_version) > 0:
        raise RuntimeError(
            "Please upgrade Python! Current version is %s. Minimum is %s." % (python_version, required_python))
    if version_compare(recommended_python, python_version) > 0:
        logging.error(
            "Please upgrade Python! Current version is %s. Recommended minimum is %s." % (python_version, recommended_python))

    # Capture "git --version" output so it can be reported and parsed.
    capture = StringIO()
    RunCmd("git", "--version", outstream=capture)
    git_version_string = capture.getvalue().strip()
    capture.close()
    VersionAggregator.GetVersionAggregator().ReportVersion(
        "Git", git_version_string, VersionAggregator.VersionTypes.TOOL)

    # This code is highly specific to the return value of "git version"...
    required_git = "2.11.0"
    installed_git = ".".join(git_version_string.split(' ')[2].split(".")[:3])
    if version_compare(required_git, installed_git) > 0:
        raise RuntimeError(
            "Please upgrade Git! Current version is %s. Minimum is %s." % (installed_git, required_git))

    # Initialized the build environment.
    return SelfDescribingEnvironment.BootstrapEnvironment(
        my_workspace_path, my_project_scope)
def build_process(my_workspace_path, my_project_scope, my_module_pkg_paths,
                  worker_module, logging_mode="standard"):
    """The common entry point for building a project or platform target

    Positional arguments:
    my_workspace_path
    my_project_scope
    my_module_pkg_paths
    worker_module -- the name of the Python module to be invoked for
                     building. must contain a subclass of UefiBuild and
                     must already exist in sys.path

    Keyword arguments:
    logging_mode -- deprecated (will be removed from future interfaces)
    """
    #
    # Initialize file-based logging.
    #
    log_directory = os.path.join(my_workspace_path, "Build")

    # TODO get the logging mode to determine the log level we should output at?
    logfile, filelogger = MuLogging.setup_txt_logger(log_directory, "BUILDLOG", logging.DEBUG)
    mdfile, mdlogger = MuLogging.setup_markdown_logger(log_directory, "BUILDLOG", logging.DEBUG)

    logging.info("Log Started: " + datetime.strftime(datetime.now(), "%A, %B %d, %Y %I:%M%p"))
    logging.info("Running Python version: " + str(sys.version_info))

    display_pip_package_info(PIP_PACKAGES_LIST)

    #
    # Next, get the environment set up.
    #
    try:
        (build_env, shell_env) = minimum_env_init(my_workspace_path, my_project_scope)
        if not SelfDescribingEnvironment.VerifyEnvironment(my_workspace_path, my_project_scope):
            raise RuntimeError("Validation failed.")
    except Exception as e:
        # Fix: chain the original exception so the root cause of the
        # environment failure is preserved in the traceback.
        raise RuntimeError(
            "Environment is not in a state to build! Please run '--UPDATE'.") from e

    # Load plugins
    logging.log(MuLogging.SECTION, "Loading Plugins")
    pluginManager = PluginManager.PluginManager()
    failedPlugins = pluginManager.SetListOfEnvironmentDescriptors(build_env.plugins)
    if failedPlugins:
        logging.critical("One or more plugins failed to load. Halting build.")
        for a in failedPlugins:
            logging.error("Failed Plugin: {0}".format(a["name"]))
        raise Exception("One or more plugins failed to load.")

    helper = PluginManager.HelperFunctions()
    if (helper.LoadFromPluginManager(pluginManager) > 0):
        raise Exception("One or more helper plugins failed to load.")

    # NOTE: This implicitly assumes that the path to the module
    # identified by 'worker_module' is in PYTHONPATH.
    PlatformBuildWorker = __import__(worker_module)
    PlatformBuilder = PlatformBuildWorker.PlatformBuilder

    #
    # Now we can actually kick off a build.
    #
    logging.log(MuLogging.SECTION, "Kicking off build")
    PB = PlatformBuilder(my_workspace_path, my_module_pkg_paths, pluginManager, helper, sys.argv)
    retcode = PB.Go()

    logging.log(MuLogging.SECTION, "Summary")
    if (retcode != 0):
        MuLogging.log_progress("Error")
    else:
        MuLogging.log_progress("Success")

    # always output the location of the log file
    MuLogging.log_progress("Log file at " + logfile)

    # get all vars needed as we can't do any logging after shutdown otherwise our log is cleared.
    # Log viewer
    ep = PB.env.GetValue("LaunchBuildLogProgram")
    LogOnSuccess = PB.env.GetValue("LaunchLogOnSuccess", default="FALSE")
    LogOnError = PB.env.GetValue("LaunchLogOnError", default="FALSE")

    # end logging
    logging.shutdown()
    # no more logging

    if (ep is not None):
        cmd = ep + " " + logfile
        #
        # Conditionally launch the shell to show build log
        #
        if (((retcode != 0) and (LogOnError.upper() == "TRUE"))
                or (LogOnSuccess.upper() == "TRUE")):
            subprocess.Popen(cmd, shell=True)

    sys.exit(retcode)
def setup_process(my_workspace_path, my_project_scope, my_required_repos,
                  force_it=False, cache_path=None):
    """Prepare the workspace: optionally clean, then sync/fetch submodules
    and pull this project's external dependencies.

    Positional arguments:
    my_workspace_path -- root of the workspace (the main Git repo)
    my_project_scope -- scope strings passed through to minimum_env_init
    my_required_repos -- submodule paths, relative to the workspace root

    Keyword arguments:
    force_it -- hard-reset and clean every repo before fetching
    cache_path -- optional '--reference' path for 'git submodule update'
    """

    def emit_lines(level, text):
        # Log every non-empty line of a multi-line string at the given level.
        for entry in text.split("\n"):
            if entry != "":
                logging.log(level, entry)

    # Pre-setup cleaning if "--force" is specified.
    if force_it:
        try:
            # Clean and reset the main repo.
            MuLogging.log_progress("## Cleaning the root repo...")
            cmd_with_output('git reset --hard', my_workspace_path)
            emit_lines(logging.INFO, cmd_with_output('git clean -xffd', my_workspace_path))
            MuLogging.log_progress("Done.\n")

            # Clean any submodule repos.
            if my_required_repos:
                for repo in my_required_repos:
                    MuLogging.log_progress(
                        "## Cleaning Git repository: %s..." % repo)
                    repo_path = os.path.normpath(
                        os.path.join(my_workspace_path, repo))
                    cmd_with_output('git reset --hard', repo_path)
                    emit_lines(logging.INFO, cmd_with_output('git clean -xffd', repo_path))
                    MuLogging.log_progress("Done.\n")
        except RuntimeError as err:
            logging.error("FAILED!\n")
            logging.error("Error while trying to clean the environment!")
            emit_lines(logging.ERROR, str(err))
            return

    # Grab the remaining Git repos.
    if my_required_repos:
        # Git Repos: STEP 1 --------------------------------------
        # Make sure that the repos are all synced.
        try:
            MuLogging.log_progress("## Syncing Git repositories: %s..." % ", ".join(my_required_repos))
            cmd_with_output(
                'git submodule sync -- ' + " ".join(my_required_repos), my_workspace_path)
            MuLogging.log_progress("Done.\n")
        except RuntimeError as err:
            logging.error("FAILED!\n")
            logging.error("Error while trying to synchronize the environment!")
            emit_lines(logging.ERROR, str(err))
            return

        # Git Repos: STEP 2 --------------------------------------
        # Iterate through all repos and see whether they should be fetched.
        for repo in my_required_repos:
            try:
                MuLogging.log_progress("## Checking Git repository: %s..." % repo)

                # Git Repos: STEP 2a ---------------------------------
                # Need to determine whether to skip this repo.
                repo_path = os.path.normpath(os.path.join(my_workspace_path, repo))
                fetch_this_repo = True

                # If the repo exists (and we're not forcing things) make
                # sure that it's not in a "dirty" state.
                if os.path.exists(repo_path) and not force_it:
                    diff_output = cmd_with_output('git diff ' + repo, my_workspace_path)
                    # If anything was returned, we should skip processing the repo.
                    # It is either on a different commit or it has local changes.
                    if diff_output != "":
                        logging.info(
                            "-- NOTE: Repo currently exists and appears to have local changes!"
                        )
                        logging.info("-- Skipping fetch!")
                        fetch_this_repo = False

                # Git Repos: STEP 2b ---------------------------------
                # If we're not skipping, grab it.
                if fetch_this_repo or force_it:
                    logging.info("## Fetching repo.")
                    # Using RunCmd for this one because the c.wait blocks incorrectly somehow.
                    fetch_args = "submodule update --init --recursive --progress"
                    if cache_path is not None:
                        fetch_args += " --reference " + cache_path
                    fetch_args += " " + repo
                    RunCmd('git', fetch_args, workingdir=my_workspace_path)

                MuLogging.log_progress("Done.\n")
            except RuntimeError as err:
                logging.error("FAILED!\n")
                logging.error("Failed to fetch required repository!\n")
                emit_lines(logging.ERROR, str(err))

    # Now that we should have all of the required code,
    # we're ready to build the environment and fetch the
    # dependencies for this project.
    MuLogging.log_progress("## Fetching all external dependencies...")
    (build_env, shell_env) = minimum_env_init(my_workspace_path, my_project_scope)
    SelfDescribingEnvironment.UpdateDependencies(my_workspace_path, my_project_scope)
    MuLogging.log_progress("Done.\n")
def main():
    """Top-level MuBuild entry point.

    Loads the mu config yaml, sets up workspace logging, resolves Git
    dependencies, bootstraps the environment, runs every IMuBuildPlugin
    against every requested package/target, writes a junit XML report,
    and exits with the number of failures as the process return code.
    """
    # Parse command line arguments
    PROJECT_SCOPES = ("project_mu", )
    buildArgs = get_mu_config()
    mu_config_filepath = os.path.abspath(buildArgs.mu_config)

    if mu_config_filepath is None or not os.path.isfile(mu_config_filepath):
        raise FileNotFoundError("Invalid path to mu.json file for build: ",
                                mu_config_filepath)

    # have a build config file
    with open(mu_config_filepath, 'r') as mu_config_file:
        mu_config = yaml.safe_load(mu_config_file)
    # Workspace root is declared relative to the config file's directory.
    WORKSPACE_PATH = os.path.realpath(
        os.path.join(os.path.dirname(mu_config_filepath),
                     mu_config["RelativeWorkspaceRoot"]))

    # Setup the logging to the file as well as the console
    MuLogging.clean_build_logs(WORKSPACE_PATH)

    buildlog_path = os.path.join(WORKSPACE_PATH, "Build", "BuildLogs")
    # Root logger wide open; each handler filters to its own level.
    logging.getLogger("").setLevel(logging.NOTSET)
    filename = "BUILDLOG_MASTER"
    MuLogging.setup_section_level()
    MuLogging.setup_txt_logger(buildlog_path, filename)
    MuLogging.setup_markdown_logger(buildlog_path, filename)
    MuLogging.setup_console_logging(use_azure_colors=buildArgs.use_azure_color,
                                    use_color=buildArgs.color_enabled,
                                    logging_level=logging.WARNING)

    # Get scopes from config file
    if "Scopes" in mu_config:
        PROJECT_SCOPES += tuple(mu_config["Scopes"])

    # Omnicache (shared git object cache): config value, overridden by CLI.
    omnicache_path = None
    if "ReferencePath" in mu_config:
        omnicache_path = mu_config["ReferencePath"]
    if buildArgs.omnicache_path is not None:
        omnicache_path = buildArgs.omnicache_path

    # SET PACKAGE PATH
    #
    # Get Package Path from config file
    pplist = list()
    if (mu_config["RelativeWorkspaceRoot"] != ""):
        # this package is not at workspace root.
        # Add self
        pplist.append(os.path.dirname(mu_config_filepath))

    # Include packages from the config file
    if "PackagesPath" in mu_config:
        for a in mu_config["PackagesPath"]:
            pplist.append(a)

    # Check Dependencies for Repo
    if "Dependencies" in mu_config:
        logging.log(MuLogging.SECTION, "Resolving Git Repos")
        pplist.extend(
            RepoResolver.resolve_all(WORKSPACE_PATH,
                                     mu_config["Dependencies"],
                                     ignore=buildArgs.git_ignore,
                                     force=buildArgs.git_force,
                                     update_ok=buildArgs.git_update,
                                     omnicache_dir=omnicache_path))

    # make Edk2Path object to handle all path operations
    edk2path = Edk2Path(WORKSPACE_PATH, pplist)

    logging.info("Running ProjectMu Build: {0}".format(mu_config["Name"]))
    logging.info("WorkSpace: {0}".format(edk2path.WorkspacePath))
    logging.info("Package Path: {0}".format(edk2path.PackagePathList))
    logging.info("mu_build version: {0}".format(
        pkg_resources.get_distribution("mu_build").version))
    logging.info("mu_python_library version: " +
                 pkg_resources.get_distribution("mu_python_library").version)
    logging.info("mu_environment version: " +
                 pkg_resources.get_distribution("mu_environment").version)

    # which package to build
    packageList = mu_config["Packages"]
    #
    # If mu pk list supplied lets see if they are a file system path
    # If so convert to edk2 relative path
    #
    #
    if (len(buildArgs.pkglist) > 0):
        packageList = []  # clear it

        for mu_pk_path in buildArgs.pkglist:
            # if abs path lets convert
            if os.path.isabs(mu_pk_path):
                temp = edk2path.GetEdk2RelativePathFromAbsolutePath(mu_pk_path)
                if (temp is not None):
                    packageList.append(temp)
                else:
                    logging.critical(
                        "pkg-dir invalid absolute path: {0}".format(mu_pk_path))
                    raise FileNotFoundError("Invalid Package Path")
            else:
                # Check if relative path
                temp = os.path.join(os.getcwd(), mu_pk_path)
                temp = edk2path.GetEdk2RelativePathFromAbsolutePath(temp)
                if (temp is not None):
                    packageList.append(temp)
                else:
                    logging.critical(
                        "pkg-dir invalid relative path: {0}".format(mu_pk_path))
                    raise FileNotFoundError("Invalid Package Path")

    # Bring up the common minimum environment.
    logging.log(MuLogging.SECTION, "Bootstrapping Enviroment")
    (build_env, shell_env) = SelfDescribingEnvironment.BootstrapEnvironment(
        edk2path.WorkspacePath, PROJECT_SCOPES)
    CommonBuildEntry.update_process(edk2path.WorkspacePath, PROJECT_SCOPES)
    env = ShellEnvironment.GetBuildVars()
    archSupported = " ".join(mu_config["ArchSupported"])
    env.SetValue("TARGET_ARCH", archSupported, "Platform Hardcoded")

    # Generate consumable XML object- junit format
    JunitReport = MuJunitReport()

    # Keep track of failures
    failure_num = 0
    total_num = 0

    # Load plugins
    logging.log(MuLogging.SECTION, "Loading plugins")
    pluginManager = PluginManager.PluginManager()
    failedPlugins = pluginManager.SetListOfEnvironmentDescriptors(
        build_env.plugins)
    if failedPlugins:
        logging.critical("One or more plugins failed to load. Halting build.")
        for a in failedPlugins:
            logging.error("Failed Plugin: {0}".format(a["name"]))
        raise RuntimeError("One or more plugins failed to load.")

    helper = PluginManager.HelperFunctions()
    if (helper.LoadFromPluginManager(pluginManager) > 0):
        raise RuntimeError("One or more helper plugins failed to load.")

    pluginList = pluginManager.GetPluginsOfClass(PluginManager.IMuBuildPlugin)

    # Check to make sure our configuration is valid
    ConfigValidator.check_mu_confg(mu_config, edk2path, pluginList)

    for pkgToRunOn in packageList:
        #
        # run all loaded MuBuild Plugins/Tests
        #
        logging.log(MuLogging.SECTION, "Building {0} Package".format(pkgToRunOn))
        logging.info("Running on Package: {0}".format(pkgToRunOn))
        ts = JunitReport.create_new_testsuite(
            pkgToRunOn,
            "MuBuild.{0}.{1}".format(mu_config["GroupName"], pkgToRunOn))
        # Per-package log files live under the master build-log directory.
        packagebuildlog_path = os.path.join(buildlog_path, pkgToRunOn)
        _, txthandle = MuLogging.setup_txt_logger(
            packagebuildlog_path,
            "BUILDLOG_{0}".format(pkgToRunOn),
            logging_level=logging.DEBUG,
            isVerbose=True)
        _, mdhandle = MuLogging.setup_markdown_logger(
            packagebuildlog_path,
            "BUILDLOG_{0}".format(pkgToRunOn),
            logging_level=logging.DEBUG,
            isVerbose=True)
        loghandle = [txthandle, mdhandle]
        ShellEnvironment.CheckpointBuildVars()
        env = ShellEnvironment.GetBuildVars()

        # load the package level .mu.json
        # NOTE(review): despite the comment above, the file actually opened
        # is "<pkg>.mu.yaml" and it is parsed as yaml.
        pkg_config_file = edk2path.GetAbsolutePathOnThisSytemFromEdk2RelativePath(
            os.path.join(pkgToRunOn, pkgToRunOn + ".mu.yaml"))
        if (pkg_config_file):
            with open(pkg_config_file, 'r') as f:
                pkg_config = yaml.safe_load(f)
        else:
            logging.info("No Pkg Config file for {0}".format(pkgToRunOn))
            pkg_config = dict()

        # check the resulting configuration
        ConfigValidator.check_package_confg(pkgToRunOn, pkg_config, pluginList)

        # get all the defines from the package configuration
        if "Defines" in pkg_config:
            for definition_key in pkg_config["Defines"]:
                definition = pkg_config["Defines"][definition_key]
                env.SetValue(definition_key, definition,
                             "MuBuild.py from PkgConfig yaml", False)

        for Descriptor in pluginList:
            # Get our targets
            targets = ["DEBUG"]
            if Descriptor.Obj.IsTargetDependent() and "Targets" in mu_config:
                targets = mu_config["Targets"]

            for target in targets:
                MuLogging.log_progress("--Running {2}: {0} {1} --".format(
                    Descriptor.Name, target, pkgToRunOn))
                total_num += 1
                # Checkpoint again so each plugin/target run starts from the
                # package-level vars and can be reverted after the run.
                ShellEnvironment.CheckpointBuildVars()
                env = ShellEnvironment.GetBuildVars()

                env.SetValue("TARGET", target,
                             "MuBuild.py before RunBuildPlugin")
                (testcasename, testclassname) = Descriptor.Obj.GetTestName(pkgToRunOn, env)
                tc = ts.create_new_testcase(testcasename, testclassname)

                # create the stream for the build log
                plugin_output_stream = MuLogging.create_output_stream()

                # merge the repo level and package level for this specific plugin
                pkg_plugin_configuration = merge_config(
                    mu_config, pkg_config, Descriptor.descriptor)

                # perhaps we should ask the validator to run on the
                # Check if need to skip this particular plugin
                if "skip" in pkg_plugin_configuration and pkg_plugin_configuration["skip"]:
                    tc.SetSkipped()
                    MuLogging.log_progress("--->Test Skipped! %s" % Descriptor.Name)
                else:
                    try:
                        # Arguments passed to RunBuildPlugin:
                        #   - package is the edk2 path to package.  This means workspace/packagepath relative.
                        #   - edk2path object configured with workspace and packages path
                        #   - any additional command line args
                        #   - RepoConfig Object (dict) for the build
                        #   - PkgConfig Object (dict)
                        #   - EnvConfig Object
                        #   - Plugin Manager Instance
                        #   - Plugin Helper Obj Instance
                        #   - testcase Object used for outputing junit results
                        #   - output_stream the StringIO output stream from this plugin
                        rc = Descriptor.Obj.RunBuildPlugin(
                            pkgToRunOn, edk2path, sys.argv, mu_config,
                            pkg_plugin_configuration, env, pluginManager,
                            helper, tc, plugin_output_stream)
                    except Exception as exp:
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        logging.critical("EXCEPTION: {0}".format(exp))
                        exceptionPrint = traceback.format_exception(
                            type(exp), exp, exc_traceback)
                        logging.critical(" ".join(exceptionPrint))
                        tc.SetError("Exception: {0}".format(exp),
                                    "UNEXPECTED EXCEPTION")
                        rc = 1

                    # rc of 0 is success; None counts as a failure too.
                    if (rc != 0):
                        failure_num += 1
                        if (rc is None):
                            logging.error(
                                "--->Test Failed: %s returned NoneType" % Descriptor.Name)
                        else:
                            logging.error("--->Test Failed: %s returned %d" %
                                          (Descriptor.Name, rc))
                    else:
                        MuLogging.log_progress(
                            "--->Test Success {0} {1}".format(
                                Descriptor.Name, target))

                # revert to the checkpoint we created previously
                ShellEnvironment.RevertBuildVars()

                # remove the logger
                MuLogging.remove_output_stream(plugin_output_stream)
            # finished target loop
        # Finished plugin loop

        MuLogging.stop_logging(
            loghandle)  # stop the logging for this particular buildfile
        ShellEnvironment.RevertBuildVars()
    # Finished buildable file loop

    JunitReport.Output(
        os.path.join(WORKSPACE_PATH, "Build", "BuildLogs", "TestSuites.xml"))

    # Print Overall Success
    if (failure_num != 0):
        logging.error("Overall Build Status: Error")
        MuLogging.log_progress(
            "There were {0} failures out of {1} attempts".format(
                failure_num, total_num))
    else:
        MuLogging.log_progress("Overall Build Status: Success")

    # Exit code is the failure count so CI can detect partial failures.
    sys.exit(failure_num)