def _get_bare(self):
    """Return True if the repository at self._path is a bare repository."""
    out = StringIO()
    RunCmd("git", "rev-parse --is-bare-repository",
           workingdir=self._path, outstream=out)
    answer = out.getvalue().strip()
    out.close()
    # git prints the literal string "true" or "false".
    return answer.lower() == "true"
def _fetch_from_cache(self, package_name):
    """Try to satisfy this dependency from the local Nuget global-packages cache.

    Returns True if the package contents were copied from the cache into
    self.contents_dir, False otherwise.
    """
    result = False

    #
    # We still need to use Nuget to figure out where the
    # "global-packages" cache is on this machine.
    #
    if self.global_cache_path is None:
        cmd = NugetDependency.GetNugetCmd()
        cmd += ["locals", "global-packages", "-list"]
        return_buffer = StringIO()
        if (RunCmd(cmd[0], " ".join(cmd[1:]), outstream=return_buffer) == 0):
            # Seek to the beginning of the output buffer and capture the output.
            return_buffer.seek(0)
            return_string = return_buffer.read().strip()
            # Expected output: "global-packages: <path>".
            # BUGFIX: the original used str.strip("global-packages: "), which
            # removes any of those *characters* from both ends and can corrupt
            # the path (e.g. a path ending in "...\packages" loses its tail).
            # Parse the known prefix explicitly instead.
            prefix = "global-packages:"
            if return_string.startswith(prefix):
                self.global_cache_path = return_string[len(prefix):].strip()

    if self.global_cache_path is None:
        logging.info(
            "Nuget was unable to provide global packages cache location.")
        return False
    #
    # If the path couldn't be found, we can't do anything else.
    #
    if not os.path.isdir(self.global_cache_path):
        logging.info(
            "Could not determine Nuget global packages cache location.")
        return False

    #
    # Now, try to locate our actual cache path
    #
    nuget_version = NugetDependency.normalize_version(self.version)
    cache_search_path = os.path.join(
        self.global_cache_path, package_name.lower(), nuget_version)
    inner_cache_search_path = os.path.join(cache_search_path, package_name)
    if os.path.isdir(cache_search_path):
        # If we found a cache for this version, let's use it.
        if os.path.isdir(inner_cache_search_path):
            logging.info(
                "Local Cache found for Nuget package '%s'. Skipping fetch.",
                package_name)
            shutil.copytree(inner_cache_search_path, self.contents_dir)
            self.update_state_file()
            result = True
        # If this cache doesn't match our heuristic, let's warn the user.
        else:
            logging.warning(
                "Local Cache found for Nuget package '%s', but could not find contents. Malformed?",
                package_name)

    return result
def GetDiff(self, pkg: str, temp_diff_output: str) -> List[str]:
    """Return the unified diff between origin/master and HEAD as a list of lines.

    The diff is written by git to temp_diff_output and read back from there.
    """
    #
    # Generate unified diff between origin/master and HEAD.
    #
    RunCmd("git",
           "diff --output={} --unified=0 origin/master HEAD".format(temp_diff_output))
    with open(temp_diff_output) as diff_file:
        return diff_file.read().strip().split('\n')
def ModuleGitHash(path):
    '''gets the current git hash of the given directory that path is'''
    repo_dir = os.path.dirname(os.path.abspath(path))
    git_out = StringIO()
    if RunCmd("git", "rev-parse --verify HEAD",
              workingdir=repo_dir, outstream=git_out) != 0:
        return None
    git_out.seek(0)
    commit = git_out.readline().strip()
    # A hash never contains spaces; anything else means git printed an error.
    if " " in commit:
        raise RuntimeError("Unable to get GIT HASH for: " + repo_dir)
    return commit
def sign(data, signature_options, signer_options):
    '''primary signing interface.  Takes in the signature_options and
    signer_options dictionaries that are used by capsule_tool and
    capsule_helper.  Returns the detached PKCS7 signature bytes.'''
    # NOTE: Currently, we only support the necessary algorithms for capsules.
    if signature_options['sign_alg'] != 'pkcs12':
        raise ValueError(
            f"Unsupported signature algorithm: {signature_options['sign_alg']}!"
        )
    if signature_options['hash_alg'] != 'sha256':
        raise ValueError(
            f"Unsupported hashing algorithm: {signature_options['hash_alg']}!")
    if 'key_file' not in signer_options:
        raise ValueError(
            "Must supply a key_file in signer_options for Signtool!")

    # Set up a temp directory to hold input and output files.
    # NOTE(review): this directory is never cleaned up; lifetime left
    # unchanged in case callers inspect the intermediate files.
    temp_folder = tempfile.mkdtemp()
    in_file_path = os.path.join(temp_folder, "data_to_sign.bin")

    # Create the input file for Signtool.
    # BUGFIX: use context managers so the handles are closed even if a
    # write/read raises (the original leaked open file objects on error).
    with open(in_file_path, 'wb') as in_file:
        in_file.write(data)

    # Start building the parameters for the call.
    signtool_params = ['sign']
    signtool_params += ['/fd', signature_options['hash_alg']]
    signtool_params += ['/p7ce', 'DetachedSignedData']
    signtool_params += ['/p7', f'"{temp_folder}"']
    signtool_params += ['/f', f"\"{signer_options['key_file']}\""]
    if 'oid' in signer_options:
        signtool_params += ['/p7co', signer_options['oid']]
    if 'eku' in signer_options:
        signtool_params += ['/u', signer_options['eku']]
    if 'key_pass' in signer_options:
        signtool_params += ['/p', signer_options['key_pass']]
    # Add basic options.
    signtool_params += ['/debug', '/v', f'"{in_file_path}"']

    # Make the call to Signtool.
    ret = RunCmd(get_signtool_path(), " ".join(signtool_params))
    if ret != 0:
        raise RuntimeError(f"Signtool.exe returned with error: {ret}!")

    # Load the data from the output file and return it.
    out_file_path = os.path.join(temp_folder, "data_to_sign.bin.p7")
    with open(out_file_path, 'rb') as out_file:
        return out_file.read()
def _get_head(self):
    """Return an ObjectDict whose 'commit' entry is the current HEAD hash."""
    out = StringIO()
    RunCmd("git", "rev-parse HEAD", workingdir=self._path, outstream=out)
    commit = out.getvalue().strip()
    out.close()
    head = ObjectDict()
    head.set("commit", commit)
    return head
def _get_submodule_list(self):
    """Return the list of submodule paths declared in .gitmodules (may be empty)."""
    out = StringIO()
    RunCmd("git", "config --file .gitmodules --get-regexp path",
           workingdir=self._path, outstream=out)
    raw = out.getvalue().strip()
    out.close()
    if not raw:
        return []
    # Each line looks like "submodule.<name>.path <path>"; keep the path part.
    return [line.split(' ')[1] for line in raw.split("\n")]
def ModuleGitPatch(path, git_hash):
    '''return a git patch of the given file since the hash'''
    # TODO - let this go to console so we get colors
    diff_out = io.StringIO()
    workdir = os.path.dirname(path)
    if RunCmd("git", f"diff {git_hash} {path}",
              workingdir=workdir, outstream=diff_out) != 0:
        return ""
    diff_out.seek(0)
    # Strip each line and rejoin, matching the original normalization.
    return "\n".join(line.strip() for line in diff_out.readlines())
def _check_markdown(self, rel_file_to_check: os.PathLike, abs_config_file_to_use: os.PathLike, Ignores: List) -> []:
    """Run markdownlint on one file.

    Returns [] when the file passes, otherwise the tool's output lines.
    """
    out = StringIO()
    args = f"--config {abs_config_file_to_use}"
    args += "".join(f' --ignore "{pattern}"' for pattern in Ignores)
    args += f' "{rel_file_to_check}"'
    if RunCmd("markdownlint", args, outstream=out) == 0:
        return []
    return out.getvalue().strip().splitlines()
def _GetReleaseNote():
    """Return up to the first 155 characters of the HEAD commit message.

    Raises RuntimeError if git cannot be queried.
    """
    out = StringIO()
    if RunCmd("git", "log --format=%B -n 1 HEAD", outstream=out) != 0:
        raise RuntimeError("Unable to read release notes")
    # Seek to the beginning of the output buffer and capture the output.
    out.seek(0)
    # read the first 155 characters and replace newlines with spaces
    note = out.read(155).replace("\n", " ")
    out.close()
    # TODO: figure out if there was more input and append a ... if needed
    return note.strip()
def FlashRomImage(self):
    """Boot the built OVMF firmware in QEMU as a smoke test.

    Honors QEMU_SKIP / QEMU_HEADLESS / SMM_REQUIRE / MAKE_STARTUP_NSH
    environment values.  Returns the QEMU exit code (0 on success).
    """
    VirtualDrive = os.path.join(self.env.GetValue("BUILD_OUTPUT_BASE"),
                                "VirtualDrive")
    os.makedirs(VirtualDrive, exist_ok=True)
    OutputPath_FV = os.path.join(self.env.GetValue("BUILD_OUTPUT_BASE"), "FV")

    if (self.env.GetValue("QEMU_SKIP") and
            self.env.GetValue("QEMU_SKIP").upper() == "TRUE"):
        logging.info("skipping qemu boot test")
        return 0

    #
    # QEMU must be on the path
    #
    cmd = "qemu-system-x86_64"
    args = "-debugcon stdio"  # write messages to stdio
    args += " -global isa-debugcon.iobase=0x402"  # debug messages out thru virtual io port
    args += " -net none"  # turn off network
    args += f" -drive file=fat:rw:{VirtualDrive},format=raw,media=disk"  # Mount disk with startup.nsh

    # BUGFIX: GetValue may return None when the variable is unset, which made
    # .upper() raise AttributeError (QEMU_SKIP above already guards for this).
    if (self.env.GetValue("QEMU_HEADLESS") or "").upper() == "TRUE":
        args += " -display none"  # no graphics

    if (self.env.GetBuildValue("SMM_REQUIRE") == "1"):
        args += " -machine q35,smm=on"  # ,accel=(tcg|kvm)
        # args += " -m ..."
        # args += " -smp ..."
        args += " -global driver=cfi.pflash01,property=secure,value=on"
        args += " -drive if=pflash,format=raw,unit=0,file=" + os.path.join(
            OutputPath_FV, "OVMF_CODE.fd") + ",readonly=on"
        args += " -drive if=pflash,format=raw,unit=1,file=" + os.path.join(
            OutputPath_FV, "OVMF_VARS.fd")
    else:
        args += " -pflash " + os.path.join(OutputPath_FV, "OVMF.fd")  # path to firmware

    if (self.env.GetValue("MAKE_STARTUP_NSH") or "").upper() == "TRUE":
        # Context manager guarantees the script is flushed/closed before QEMU
        # mounts the virtual drive.
        with open(os.path.join(VirtualDrive, "startup.nsh"), "w") as f:
            f.write("BOOT SUCCESS !!! \n")  # add commands here
            f.write("reset -s\n")

    ret = RunCmd(cmd, args)
    if ret == 0xc0000005:
        # for some reason getting a c0000005 on successful return
        return 0
    return ret
def get_thumbprint_from_pfx(self, pfxfilename=None):
    """Extract the SHA1 thumbprint from a PFX file using CertMgr.exe.

    Returns the 40-character thumbprint on success.  Failure paths keep the
    original contract: a non-zero CertMgr return code, 'No thumbprint', or
    "Unable to read certificate".
    """
    global CertMgrPath
    # BUGFIX: identity comparison with None instead of '=='.
    if pfxfilename is None:
        raise Exception("Pfx File Name is required")

    # Named temp file captures CertMgr's console output.
    fp = tempfile.NamedTemporaryFile(delete=False)
    tfile = fp.name
    fp.close()
    try:
        #
        # Cert Manager is used for deleting the cert when add/removing certs
        #
        # 1 - use Certmgr to get the PFX sha1 thumbprint
        if CertMgrPath is None:
            CertMgrPath = DFCI_SupportLib().get_certmgr_path()
        parameters = " /c " + pfxfilename
        ret = RunCmd(CertMgrPath, parameters, outfile=tfile)
        if (ret != 0):
            logging.critical(
                "Failed to get cert info from Pfx file using CertMgr.exe")
            return ret
        # Context manager: handle is closed even if readlines raises.
        with open(tfile, "r") as f:
            pfxdetails = f.readlines()

        # 2 - Parse the pfxdetails for the sha1 thumbprint
        thumbprint = ""
        found = False
        for a in pfxdetails:
            a = a.strip()
            if (len(a)):
                if (found):
                    # First non-empty line after the marker is the thumbprint
                    # (spaces removed).
                    thumbprint = ''.join(a.split())
                    break
                else:
                    if (a == "SHA1 Thumbprint::"):
                        found = True
        if (len(thumbprint) != 40) or (found is False):
            return 'No thumbprint'
    except Exception:
        traceback.print_exc()
        return "Unable to read certificate"
    finally:
        # BUGFIX: the original only deleted the temp file on the success path,
        # leaking it when CertMgr failed or parsing raised.
        try:
            os.remove(tfile)
        except OSError:
            pass
    return thumbprint
def PackageCapsuleHeader(InputBin, OutputBin, FmpDeviceGuid=None):
    """Wrap InputBin in a final FMP capsule header, writing the result to OutputBin.

    FmpDeviceGuid defaults to the industry-standard FMP GUID when not given.
    Raises Exception if GenFv fails; returns 0 on success.
    """
    logging.debug("CapsulePackage: Final Capsule Header")
    if FmpDeviceGuid is None:
        logging.debug("CapsulePackage: Using default industry standard FMP guid")
        FmpDeviceGuid = "6dcbd5ed-e82d-4c44-bda1-7194199ad92a"

    params = "-o " + OutputBin
    params += " -g " + FmpDeviceGuid
    params += " --capsule -v -f " + InputBin
    params += " --capFlag PersistAcrossReset --capFlag InitiateReset"
    ret = RunCmd("genfv", params)
    if ret != 0:
        # BUGFIX: the original raised Exception("...errorcode" % ret) — a
        # %-format on a string with no conversion specifier, which raised
        # TypeError and masked the real GenFv error code.
        raise Exception("GenFv Failed with errorcode %d" % ret)
    return ret
def _get_remotes(self):
    """Return an ObjectDict mapping each git remote name to an ObjectDict
    holding its url."""
    out = StringIO()
    remotes = ObjectDict()
    RunCmd("git", "remote", workingdir=self._path, outstream=out)
    names = out.getvalue().strip()
    out.close()
    for name in names.split("\n"):
        url = ObjectDict()
        url.set("url", self._get_url(name))
        setattr(remotes, name, url)
    return remotes
def do_post_build(self, thebuilder):
    """Run host-based unit tests after a 'host_unit_test' CI build.

    For each target architecture, executes every *Test*.exe found in the
    build output directory, then parses the cmocka XML result files and
    logs any test failures.  Always returns 0 (failures are logged, not
    propagated as a build error).
    """
    ci_type = thebuilder.env.GetValue('CI_BUILD_TYPE')
    if ci_type != 'host_unit_test':
        # Nothing to do for other CI build types.
        return 0
    shell_env = shell_environment.GetEnvironment()
    logging.log(edk2_logging.get_section_level(),
                "Run Host based Unit Tests")
    path = thebuilder.env.GetValue("BUILD_OUTPUT_BASE")
    for arch in thebuilder.env.GetValue("TARGET_ARCH").split():
        logging.log(edk2_logging.get_subsection_level(),
                    "Testing for architecture: " + arch)
        cp = os.path.join(path, arch)
        # If any old results XML files exist, clean them up.
        for old_result in glob.iglob(os.path.join(cp, "*.result.xml")):
            os.remove(old_result)
        # Determine whether any tests exist.
        testList = glob.glob(os.path.join(cp, "*Test*.exe"))
        for test in testList:
            # Configure output name.  cmocka expands %g to the group name,
            # so each test group gets its own result file.
            shell_env.set_shell_var('CMOCKA_XML_FILE',
                                    test + ".%g." + arch + ".result.xml")
            # Run the test.
            ret = RunCmd('"' + test + '"', "", workingdir=cp)
            if (ret != 0):
                logging.error("UnitTest Execution Error: " +
                              os.path.basename(test))
            else:
                logging.info("UnitTest Completed: " + os.path.basename(test))
                file_match_pattern = test + ".*." + arch + ".result.xml"
                xml_results_list = glob.glob(file_match_pattern)
                for xml_result_file in xml_results_list:
                    root = xml.etree.ElementTree.parse(
                        xml_result_file).getroot()
                    # cmocka layout: testsuites > testsuite > testcase,
                    # with a <failure> child on failing cases.
                    for suite in root:
                        for case in suite:
                            for result in case:
                                if result.tag == 'failure':
                                    logging.warning("%s Test Failed" %
                                                    os.path.basename(test))
                                    logging.warning(
                                        "  %s - %s" %
                                        (case.attrib['name'], result.text))
    return 0
def _get_dirty(self):
    """Return True if the working tree has local modifications or commits
    that are not present on any remote."""
    def _git_output(params):
        # Run a git command in the repo and return its stripped stdout.
        buf = StringIO()
        RunCmd("git", params, workingdir=self._path, outstream=buf)
        text = buf.getvalue().strip()
        buf.close()
        return text

    # Any staged or unstaged file changes?
    if len(_git_output("status --short")) > 0:
        return True
    # Any local commits not on a remote branch?
    if len(_git_output("log --branches --not --remotes --decorate --oneline")) > 0:
        return True
    return False
def FlashRomImage(self):
    """Boot the built OVMF firmware in QEMU with a persistent Ubuntu VM disk.

    Returns the QEMU exit code (0 on success).
    """
    VirtualDrive = os.path.join(self.env.GetValue("BUILD_OUTPUT_BASE"),
                                "VirtualDrive")
    os.makedirs(VirtualDrive, exist_ok=True)
    OutputPath_FV = os.path.join(self.env.GetValue("BUILD_OUTPUT_BASE"), "FV")

    #
    # QEMU must be on the path
    #
    cmd = "qemu-system-x86_64"
    args = "-debugcon stdio"  # write messages to stdio
    args += " -global isa-debugcon.iobase=0x402"  # debug messages out thru virtual io port
    args += " -net none"  # turn off network
    # args += f" -drive file=fat:rw:{VirtualDrive},format=raw,media=disk"  # Mount disk with startup.nsh
    # args += " -cdrom /home/gttyson/edk2/VM/ubuntu-20.04.3-desktop-amd64.iso"  # GARAND: Addition to boot
    # args += f" -drive file=fat:rw:/home/user/UEFI_workspace/support/chipsec_drive/ -m 1024"  # GARAND: Addition to boot
    # NOTE(review): hard-coded user-specific disk path below; consider
    # promoting this to an environment value so other users can run it.
    args += " -drive file=/home/gttyson/edk2/VM/vm.disk,if=virtio,format=raw"  # Persistent Unbuntu VM disk
    args += " -enable-kvm -m 4G"

    # BUGFIX: GetValue may return None when the variable is unset, which made
    # .upper() raise AttributeError.
    if (self.env.GetValue("QEMU_HEADLESS") or "").upper() == "TRUE":
        args += " -display none"  # no graphics

    if (self.env.GetBuildValue("SMM_REQUIRE") == "1"):
        args += " -machine q35,smm=on"  # ,accel=(tcg|kvm)
        # args += " -m ..."
        # args += " -smp ..."
        args += " -global driver=cfi.pflash01,property=secure,value=on"
        args += " -drive if=pflash,format=raw,unit=0,file=" + os.path.join(
            OutputPath_FV, "OVMF_CODE.fd") + ",readonly=on"
        args += " -drive if=pflash,format=raw,unit=1,file=" + os.path.join(
            OutputPath_FV, "OVMF_VARS.fd")
    else:
        args += " -pflash " + os.path.join(OutputPath_FV, "OVMF.fd")  # path to firmware

    if (self.env.GetValue("MAKE_STARTUP_NSH") or "").upper() == "TRUE":
        # Context manager guarantees the script is closed before QEMU starts.
        with open(os.path.join(VirtualDrive, "startup.nsh"), "w") as f:
            f.write("BOOT SUCCESS !!! \n")  # add commands here
            # GARAND: Add built efi apps here
            f.write("reset -s\n")

    ret = RunCmd(cmd, args)
    if ret == 0xc0000005:
        # for some reason getting a c0000005 on successful return
        return 0
    return ret
def pull(self):
    """Run 'git pull' in the repo.  Returns True on success, False on failure
    (logging git's output as an error)."""
    out = StringIO()
    exit_code = RunCmd("git", "pull", workingdir=self._path, outstream=out)
    message = out.getvalue().strip()
    if exit_code != 0:
        self._logger.error(message)
        return False
    return True
def checkout(self, branch=None, commit=None):
    """Check out the given branch or commit.

    Supply exactly one of branch/commit (branch wins if both are given).
    Returns True on success, False on failure.
    """
    if branch is not None:
        params = "checkout %s" % branch
    elif commit is not None:
        params = "checkout %s" % commit
    else:
        # BUGFIX: the original fell through with 'params' unbound and raised
        # UnboundLocalError when called with neither argument; fail cleanly.
        self._logger.debug("checkout called without branch or commit")
        return False
    return_buffer = StringIO()
    ret = RunCmd("git", params, workingdir=self._path,
                 outstream=return_buffer)
    p1 = return_buffer.getvalue().strip()
    if ret != 0:
        self._logger.debug(p1)
        return False
    return True
def submodule(self, command, *args):
    """Run 'git submodule <command> <args...>' in the repo; True on success."""
    self._logger.debug("Calling command on submodule {0} with {1}".format(
        command, args))
    out = StringIO()
    git_args = "submodule {0} {1}".format(command, " ".join(args))
    exit_code = RunCmd("git", git_args, workingdir=self._path, outstream=out)
    if exit_code != 0:
        self._logger.error(out.getvalue().strip())
        return False
    return True
def _DownloadNugetPackageVersion(self, package_name: str, version: str, destination: os.PathLike, source=None):
    """Install a specific Nuget package version into destination.

    Returns True if nuget exited successfully, False otherwise.
    """
    cmd = NugetDependency.GetNugetCmd()
    cmd += ["install", package_name]
    if source is not None:
        cmd += ["-Source", source]
    cmd += ["-ExcludeVersion"]
    cmd += ["-Version", version]
    cmd += ["-Verbosity", "detailed"]
    cmd += ["-OutputDirectory", '"' + destination + '"']
    return RunCmd(cmd[0], " ".join(cmd[1:])) == 0
def _GetLatestNugetVersion(package_name, source=None):
    """Query 'nuget list' for the newest published version of package_name.

    Returns the version string, or "0.0.0.0" when the query fails or the
    package is not found in the feed.
    """
    cmd = NugetDependency.GetNugetCmd()
    cmd += ["list"]
    cmd += [package_name]
    if source is not None:
        cmd += ["-Source", source]
    return_buffer = StringIO()
    if RunCmd(cmd[0], " ".join(cmd[1:]), outstream=return_buffer) != 0:
        return "0.0.0.0"
    # Seek to the beginning of the output buffer and capture the output.
    return_buffer.seek(0)
    lines = return_buffer.readlines()
    return_buffer.close()
    for line in lines:
        line = line.strip()
        # Output lines look like "<PackageName> <version>".
        if line.startswith(package_name):
            return line.replace(package_name, "").strip()
    # BUGFIX: previously a successful query whose output contained no
    # matching line fell off the end of the function and returned None;
    # return the sentinel version instead so callers can compare versions.
    return "0.0.0.0"
def FindWithVsWhere(products="*"):
    """Locate the Visual Studio installation path(s) via vswhere.

    Returns (0, path_string) on success, otherwise (errcode, None).
    """
    vs_where_path = GetVsWherePath()
    if vs_where_path is None:
        logging.warning("We weren't able to find VSWhere")
        return (1, None)
    cmd = "-latest -nologo -all -property installationPath"
    if products is not None:
        cmd += " -products " + products
    out = StringIO()
    ret = RunCmd(vs_where_path, cmd, outstream=out)
    if ret != 0:
        out.close()
        return (ret, None)
    install_path = out.getvalue().strip()
    out.close()
    if len(install_path) > 0:
        return (0, install_path)
    return (ret, None)
def _execute_uncrustify(self) -> None:
    """Executes Uncrustify with the initialized configuration, capturing the
    exit code and output lines on the instance."""
    # MU_CHANGE [BEGIN] - Add "UNCRUSTIFY_IN_PLACE" option.
    out = StringIO()
    cli_args = ['-c', self._app_config_file,
                '-F', self._app_input_file_path,
                '--if-changed']
    if self._env.GetValue("UNCRUSTIFY_IN_PLACE", "FALSE") == "TRUE":
        cli_args += ['--replace', '--no-backup']
    else:
        cli_args += ['--suffix', UncrustifyCheck.FORMATTED_FILE_EXTENSION]
    self._app_exit_code = RunCmd(
        self._app_path, " ".join(cli_args), outstream=out)
    # MU_CHANGE [END] - Add "UNCRUSTIFY_IN_PLACE" option.
    self._app_output = out.getvalue().strip().splitlines()
def MakeCat(self, OutputCatFile, PathToInf2CatTool=None):
    """Create a catalog (.cat) file for the driver using Inf2Cat.

    Raises Exception if the tool is missing, the tool fails, or the CAT
    file is not produced.  Returns 0 on success.
    """
    # Find Inf2Cat tool
    if PathToInf2CatTool is None:
        PathToInf2CatTool = FindToolInWinSdk("Inf2Cat.exe")
    # check if exists
    if PathToInf2CatTool is None or not os.path.exists(PathToInf2CatTool):
        raise Exception("Can't find Inf2Cat on this machine. Please install the Windows 10 WDK - "
                        "https://developer.microsoft.com/en-us/windows/hardware/windows-driver-kit")

    OutputFolder = os.path.dirname(OutputCatFile)
    # Make Cat file
    tool_args = "/driver:. /os:" + self.OperatingSystem + "_" + self.Arch + " /verbose"
    status = RunCmd(PathToInf2CatTool, tool_args, workingdir=OutputFolder)
    if status != 0:
        raise Exception("Creating Cat file Failed with errorcode %d" % status)
    if not os.path.isfile(OutputCatFile):
        raise Exception("CAT file (%s) not created" % OutputCatFile)
    return 0
def fetch(self, remote="origin", branch=None):
    """Fetch from the given remote (optionally a single branch mapped onto
    itself).  Returns True on success, False on failure."""
    out = StringIO()
    pieces = ["fetch", remote]
    if branch is not None:
        pieces.append(f"{branch}:{branch}")
    exit_code = RunCmd("git", " ".join(pieces),
                       workingdir=self._path, outstream=out)
    message = out.getvalue().strip()
    if exit_code != 0:
        self._logger.error(message)
        return False
    return True
def GetModifyDir(self, pkg: str) -> List[str]:
    """Return the unique directories under pkg containing files that differ
    between HEAD and origin/master."""
    buf = StringIO()
    RunCmd("git", "diff --name-status HEAD origin/master", outstream=buf)
    changed_entries = buf.getvalue().strip().split("\n")
    buf.close()

    unique_dirs = set()
    for entry in changed_entries:
        matches = self.ReModifyFile.findall(entry)
        if not matches:
            continue
        file_dir = os.path.dirname(matches[0])
        # Keep directories inside pkg, excluding the package root itself.
        if pkg in file_dir and file_dir != pkg:
            unique_dirs.add(file_dir)
    return list(unique_dirs)
def FlashRomImage(self):
    '''Use the FlashRom Function to run the emulator.  This gives an easy
    stuart command line to activate the emulator.  Returns the emulator's
    exit code, or -1 on an unsupported host OS.'''
    OutputPath = os.path.join(self.env.GetValue("BUILD_OUTPUT_BASE"),
                              self.env.GetValue("TARGET_ARCH"))

    if (self.env.GetValue("MAKE_STARTUP_NSH") == "TRUE"):
        # BUGFIX: use a context manager so the script is closed even if a
        # write fails (original used bare open/close).
        with open(os.path.join(OutputPath, "startup.nsh"), "w") as f:
            f.write("BOOT SUCCESS !!! \n")  # add commands here
            f.write("reset\n")

    # Hoist the host query so GetHostInfo() is only called once.
    host_os = GetHostInfo().os.upper()
    if host_os == "WINDOWS":
        cmd = "WinHost.exe"
    elif host_os == "LINUX":
        cmd = "./Host"
    else:
        logging.critical("Unsupported Host")
        return -1
    return RunCmd(cmd, "", workingdir=OutputPath)
def _initialize_app_info(self) -> None: """ Initialize Uncrustify application information. This function will determine the application path and version. """ # Verify Uncrustify is specified in the environment. if UncrustifyCheck.UNCRUSTIFY_PATH_ENV_KEY not in os.environ: raise UncrustifyAppEnvVarNotFoundException( f"Uncrustify environment variable {UncrustifyCheck.UNCRUSTIFY_PATH_ENV_KEY} is not present." ) self._app_path = shutil.which( 'uncrustify', path=os.environ[UncrustifyCheck.UNCRUSTIFY_PATH_ENV_KEY]) if self._app_path is None: raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), self._app_path) self._app_path = os.path.normcase(os.path.normpath(self._app_path)) if not os.path.isfile(self._app_path): raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), self._app_path) # Verify Uncrustify is present at the expected path. return_buffer = StringIO() ret = RunCmd(self._app_path, "--version", outstream=return_buffer) if (ret != 0): raise UncrustifyAppVersionErrorException( f"Error occurred executing --version: {ret}.") # Log Uncrustify version information. self._app_version = return_buffer.getvalue().strip() self._tc.LogStdOut(f"Uncrustify version: {self._app_version}") version_aggregator.GetVersionAggregator().ReportVersion( "Uncrustify", self._app_version, version_aggregator.VersionTypes.INFO)
def _get_git_submodule_paths(self) -> List[str]:
    """
    Returns a list of directory absolute path strings to the root of
    each submodule in the workspace repository.

    If git is not found, an empty list will be returned.

    Raises UncrustifyGitSubmoduleException if .gitmodules exists but
    cannot be read via git.
    """
    if not shutil.which("git"):
        # BUGFIX: logging.warn is a deprecated alias; use logging.warning.
        logging.warning(
            "Git is not found on this system. Git submodule paths will not be considered."
        )
        return []

    # Guard clause: no .gitmodules means no submodules to exclude.
    if not os.path.isfile(os.path.join(self._abs_workspace_path, ".gitmodules")):
        return []

    logging.info(
        f".gitmodules file found. Excluding submodules in {self._package_name}."
    )
    outstream_buffer = StringIO()
    exit_code = RunCmd("git", "config --file .gitmodules --get-regexp path",
                       workingdir=self._abs_workspace_path,
                       outstream=outstream_buffer,
                       logging_level=logging.NOTSET)
    if (exit_code != 0):
        raise UncrustifyGitSubmoduleException(
            f".gitmodule file detected but an error occurred reading the file. Cannot proceed with unknown submodule paths."
        )

    submodule_paths = []
    # Each config line is "submodule.<name>.path <relative path>".
    for line in outstream_buffer.getvalue().strip().splitlines():
        submodule_paths.append(
            os.path.normpath(
                os.path.join(self._abs_workspace_path, line.split()[1])))
    return submodule_paths