Example #1
    def GenerateEccReport(self, modify_dir_list: List[str], ecc_diff_range: Dict[str, List[Tuple[int, int]]],
                          edk2_path: str) -> None:
        ecc_need = False
        ecc_run = True
        config = os.path.join(edk2_path, "BaseTools", "Source", "Python", "Ecc", "config.ini")
        exception = os.path.join(edk2_path, "BaseTools", "Source", "Python", "Ecc", "exception.xml")
        report = os.path.join(edk2_path, "Ecc.csv")
        for modify_dir in modify_dir_list:
            target = os.path.join(edk2_path, modify_dir)
            logging.info('Run ECC tool for the commit in %s' % modify_dir)
            ecc_need = True
            ecc_params = "-c {0} -e {1} -t {2} -r {3}".format(config, exception, target, report)
            return_code = RunCmd("Ecc", ecc_params, workingdir=edk2_path)
            if return_code != 0:
                ecc_run = False
                logging.error('Fail to run ECC tool')
                break
            self.ParseEccReport(ecc_diff_range, edk2_path)

        if not ecc_need:
            logging.info("Doesn't need run ECC check")

        revert_params = "checkout -- {}".format(exception)
        RunCmd("git", revert_params)
        return
    def MakeDrive(self, size: int=60):
        ret = RunCmd("VHDCreate", f'-sz {size}MB {self.path_to_vhd}')
        if ret != 0:
            logging.error("Failed to create VHD")
            return ret

        ret = RunCmd("DiskFormat", f"-ft fat -ptt bios {self.path_to_vhd}")
        if ret != 0:
            logging.error("Failed to format VHD")
            return ret
        return ret
    def _get_files_that_changed_in_this_pr(self, base_branch) -> tuple:
        ''' Get all the files that changed in this pr.
            Return the error code and list of files
        '''

        # get file differences between pr and base
        output = StringIO()
        cmd_params = f"diff --name-only HEAD..{base_branch}"
        rc = RunCmd("git", cmd_params, outstream=output)

        if (rc == 0):
            self.logger.debug("git diff command returned successfully!")
        else:
            self.logger.critical("git diff returned error return value: %s" %
                                 str(rc))
            return (rc, [])

        if not output.getvalue().strip():
            self.logger.info("No files listed in diff")
            return (0, [])

        files = output.getvalue().split()
        for f in files:
            self.logger.debug(f"File Changed: {f}")
        return (0, files)
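The same changed-file query can be captured outside of a class; a minimal standalone sketch, assuming git is on PATH and edk2-pytool-library provides RunCmd (the base branch name below is a placeholder):

import logging
from io import StringIO

from edk2toollib.utility_functions import RunCmd


def list_changed_files(base_branch: str = "origin/master") -> list:
    # Sketch only: capture "git diff --name-only" output into a StringIO buffer.
    output = StringIO()
    rc = RunCmd("git", f"diff --name-only HEAD..{base_branch}", outstream=output)
    if rc != 0:
        logging.critical("git diff returned error return value: %s", rc)
        return []
    return output.getvalue().split()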
Example #4
    def _initialize_app_info(self) -> None:
        """
        Initialize Uncrustify application information.

        This function will determine the application path and version.
        """
        # Verify Uncrustify is specified in the environment.
        if UncrustifyCheck.UNCRUSTIFY_PATH_ENV_KEY not in os.environ:
            raise UncrustifyAppEnvVarNotFoundException(
                f"Uncrustify environment variable {UncrustifyCheck.UNCRUSTIFY_PATH_ENV_KEY} is not present.")

        self._app_path = shutil.which('uncrustify', path=os.environ[UncrustifyCheck.UNCRUSTIFY_PATH_ENV_KEY])

        if self._app_path is None:
            raise FileNotFoundError(
                errno.ENOENT, os.strerror(errno.ENOENT), self._app_path)

        self._app_path = os.path.normcase(os.path.normpath(self._app_path))

        if not os.path.isfile(self._app_path):
            raise FileNotFoundError(
                errno.ENOENT, os.strerror(errno.ENOENT), self._app_path)

        # Verify Uncrustify is present at the expected path.
        return_buffer = StringIO()
        ret = RunCmd(self._app_path, "--version", outstream=return_buffer)
        if (ret != 0):
            raise UncrustifyAppVersionErrorException(
                f"Error occurred executing --version: {ret}.")

        # Log Uncrustify version information.
        self._app_version = return_buffer.getvalue().strip()
        self._tc.LogStdOut(f"Uncrustify version: {self._app_version}")
        version_aggregator.GetVersionAggregator().ReportVersion(
            "Uncrustify", self._app_version, version_aggregator.VersionTypes.INFO)
Example #5
    def _get_git_ignored_paths(self) -> List[str]:
        """"
        Returns a list of file absolute path strings to all files ignored in this git repository.

        If git is not found, an empty list will be returned.
        """
        if not shutil.which("git"):
            logging.warn(
                "Git is not found on this system. Git submodule paths will not be considered."
            )
            return []

        outstream_buffer = StringIO()
        exit_code = RunCmd("git",
                           "ls-files --other",
                           workingdir=self._abs_workspace_path,
                           outstream=outstream_buffer,
                           logging_level=logging.NOTSET)
        if (exit_code != 0):
            raise UncrustifyGitIgnoreFileException(
                f"An error occurred reading git ignore settings. This will prevent Uncrustify from running against the expected set of files."
            )

        # Note: This will potentially be a large list, but at least sorted
        rel_paths = outstream_buffer.getvalue().strip().splitlines()
        return [os.path.normpath(os.path.join(self._abs_workspace_path, path))
                for path in rel_paths]
Example #6
    def _attempt_nuget_install(self, install_dir, non_interactive=True):
        #
        # fetch the contents of the package.
        #
        package_name = self.name
        cmd = NugetDependency.GetNugetCmd()
        cmd += ["install", self.name]
        cmd += ["-Source", self.source]
        cmd += ["-ExcludeVersion"]
        if non_interactive:
            cmd += ["-NonInteractive"]
        cmd += ["-Version", self.version]
        cmd += ["-Verbosity", "detailed"]
        cmd += ["-OutputDirectory", '"' + install_dir + '"']
        # make sure to capture our output
        output_stream = StringIO()
        ret = RunCmd(cmd[0], " ".join(cmd[1:]), outstream=output_stream)
        output_stream.seek(0)  # return the start of the stream
        # check if we found credential providers
        found_cred_provider = False
        for out_line in output_stream:
            line = out_line.strip()
            if line.startswith("CredentialProvider") or line.startswith("[CredentialProvider"):
                found_cred_provider = True
            if line.endswith("as a credential provider plugin."):
                found_cred_provider = True
        # if we fail, then we should retry if we have credential providers
        # we currently steal command input so if we don't have cred providers, we hang
        # this gives cred providers a chance to prompt for input since they don't use stdin
        if ret != 0:
            # If we're in non interactive and we have a credential provider
            if non_interactive and found_cred_provider:  # we should be interactive next time
                self._attempt_nuget_install(install_dir, False)
            else:
                raise RuntimeError(f"[Nuget] We failed to install this version {self.version} of {package_name}")
    def Push(self, nuPackage, apikey):
        if (not os.path.isfile(nuPackage)):
            raise Exception("Invalid file path for NuPkg file")
        logging.debug("Pushing %s file to server %s" %
                      (nuPackage, self.ConfigData["server_url"]))

        cmd = NugetDependency.GetNugetCmd()
        cmd += ["push", nuPackage]
        cmd += ["-Verbosity", "detailed"]
        # cmd += ["-NonInteractive"]
        cmd += ["-Source", self.ConfigData["server_url"]]
        cmd += ["-ApiKey", apikey]
        output_buffer = StringIO()
        ret = RunCmd(cmd[0], " ".join(cmd[1:]), outstream=output_buffer)

        if (ret != 0):
            # Rewind the buffer and capture the contents.
            output_buffer.seek(0)
            output_contents = output_buffer.read()

            # Check for the API message.
            if "API key is invalid".lower() in output_contents.lower():
                logging.critical(
                    "API key is invalid. Please use --ApiKey to provide a valid key."
                )

            # Generic error.
            logging.error("Failed on nuget commend.  RC = 0x%x" % ret)

        return ret
Example #8
    def _get_git_submodule_paths(self) -> List[str]:
        """
        Returns a list of directory absolute path strings to the root of each submodule in the workspace repository.

        If git is not found, an empty list will be returned.
        """
        if not shutil.which("git"):
            logging.warning(
                "Git is not found on this system. Git submodule paths will not be considered.")
            return []

        if os.path.isfile(os.path.join(self._abs_workspace_path, ".gitmodules")):
            logging.info(
                f".gitmodules file found. Excluding submodules in {self._package_name}.")

            outstream_buffer = StringIO()
            exit_code = RunCmd("git", "config --file .gitmodules --get-regexp path", workingdir=self._abs_workspace_path, outstream=outstream_buffer, logging_level=logging.NOTSET)
            if (exit_code != 0):
                raise UncrustifyGitSubmoduleException(
                    f".gitmodule file detected but an error occurred reading the file. Cannot proceed with unknown submodule paths.")

            submodule_paths = []
            for line in outstream_buffer.getvalue().strip().splitlines():
                submodule_paths.append(
                    os.path.normpath(os.path.join(self._abs_workspace_path, line.split()[1])))

            return submodule_paths
        else:
            return []
    def test_can_get_nuget_path(self):
        nuget_cmd = NugetDependency.GetNugetCmd()
        nuget_cmd += ["locals", "global-packages", "-list"]
        ret = RunCmd(nuget_cmd[0],
                     ' '.join(nuget_cmd[1:]),
                     outstream=sys.stdout)
        self.assertEqual(ret, 0)  # make sure we have a zero return code
def FindWithVsWhere(products: str = "*", vs_version: str = None):
    cmd = "-latest -nologo -all -property installationPath"
    vs_where_path = GetVsWherePath()
    if vs_where_path is None:
        logging.warning("We weren't able to find VSWhere")
        return (1, None)
    if (products is not None):
        cmd += " -products " + products
    if (vs_version is not None):
        vs_version = vs_version.lower()
        if vs_version in supported_vs_versions.keys():
            cmd += " -version " + supported_vs_versions[vs_version]
        else:
            logging.warning("Invalid or unsupported vs_version " + vs_version)
            return (2, None)
    a = StringIO()
    ret = RunCmd(vs_where_path, cmd, outstream=a)
    if (ret != 0):
        a.close()
        return (ret, None)
    p1 = a.getvalue().strip()
    a.close()
    if (len(p1.strip()) > 0):
        return (0, p1)
    return (ret, None)
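A hypothetical call site for FindWithVsWhere, showing how the (return code, install path) tuple might be consumed; it assumes a Windows host with vswhere available and the function above in scope:

if __name__ == "__main__":
    # Sketch only: query the latest installation without pinning a VS version.
    rc, vs_install_path = FindWithVsWhere(products="*", vs_version=None)
    if rc == 0 and vs_install_path is not None:
        print("Latest Visual Studio installation: " + vs_install_path)
    else:
        print("vswhere lookup failed with return code %d" % rc)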
Example #11
    def MakeCat(self, OutputCatFile, PathToInf2CatTool=None):
        # Find Inf2Cat tool
        if (PathToInf2CatTool is None):
            PathToInf2CatTool = FindToolInWinSdk("Inf2Cat.exe")
        # check if exists
        if PathToInf2CatTool is None or not os.path.exists(PathToInf2CatTool):
            raise Exception(
                "Can't find Inf2Cat on this machine.  Please install the Windows 10 WDK - "
                "https://developer.microsoft.com/en-us/windows/hardware/windows-driver-kit"
            )

        # Adjust for spaces in the path (when calling the command).
        if " " in PathToInf2CatTool:
            PathToInf2CatTool = '"' + PathToInf2CatTool + '"'

        OutputFolder = os.path.dirname(OutputCatFile)
        # Make Cat file
        cmd = "/driver:. /os:" + self.OperatingSystem + "_" + self.Arch + " /verbose"
        ret = RunCmd(PathToInf2CatTool, cmd, workingdir=OutputFolder)
        if (ret != 0):
            raise Exception("Creating Cat file Failed with errorcode %d" % ret)
        if (not os.path.isfile(OutputCatFile)):
            raise Exception("CAT file (%s) not created" % OutputCatFile)

        return 0
def _GetCommitHashes(root_dir: os.PathLike):
    # Recursively looks at every .git and gets the commit from there
    search_path = os.path.join(root_dir, "**", ".git")
    logging.info(f"Searching {search_path} for git repos")
    search = glob.iglob(search_path, recursive=True)
    found_repos = {}
    cmd_args = "rev-parse HEAD"
    for git_path in search:
        git_path_dir = os.path.dirname(git_path)
        if git_path_dir == root_dir:
            git_repo_name = "MU_BASECORE"
        else:
            git_repo_name = os.path.relpath(git_path_dir, root_dir)
        git_repo_name = git_repo_name.upper()
        if git_repo_name in found_repos:
            raise RuntimeError(
                f"we've already found this repo before {git_repo_name} {git_path_dir}"
            )
        # read the git hash for this repo
        return_buffer = StringIO()
        RunCmd("git",
               cmd_args,
               workingdir=git_path_dir,
               outstream=return_buffer)
        commit_hash = return_buffer.getvalue().strip()
        return_buffer.close()
        found_repos[git_repo_name] = commit_hash
    return found_repos
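A small, hypothetical driver for _GetCommitHashes, printing each discovered repository and its HEAD commit; it assumes the function above (with its glob, logging, and os imports) is in scope and that the current directory is a workspace root:

if __name__ == "__main__":
    import os

    # Sketch only: enumerate repos under the current directory and report HEAD.
    for repo_name, commit_hash in _GetCommitHashes(os.path.abspath(".")).items():
        print(f"{repo_name}: {commit_hash}")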
    def GetRequiredSubmodules(self):
        ''' Return an iterable containing RequiredSubmodule objects.
        If there are no RequiredSubmodules, return an empty iterable.
        '''
        rs = []
        # intentionally declare this one with recursive false to avoid overhead
        rs.append(
            RequiredSubmodule("CryptoPkg/Library/OpensslLib/openssl", False))

        # To avoid maintenance of this file for every new submodule
        # lets just parse the .gitmodules and add each if not already in list.
        # The GetRequiredSubmodules is designed to allow a build to optimize
        # the desired submodules but it isn't necessary for this repository.
        result = io.StringIO()
        ret = RunCmd("git",
                     "config --file .gitmodules --get-regexp path",
                     workingdir=self.GetWorkspaceRoot(),
                     outstream=result)
        # Cmd output is expected to look like:
        # submodule.CryptoPkg/Library/OpensslLib/openssl.path CryptoPkg/Library/OpensslLib/openssl
        # submodule.SoftFloat.path ArmPkg/Library/ArmSoftFloatLib/berkeley-softfloat-3
        if ret == 0:
            for line in result.getvalue().splitlines():
                _, _, path = line.partition(" ")
                if path:
                    if path not in [x.path for x in rs]:
                        rs.append(RequiredSubmodule(
                            path,
                            True))  # add it with recursive since we don't know
        return rs
    def _PublishNuget(self):
        # otherwise do the upload
        logging.critical("PUBLISHING TO NUGET")
        # get the root directory of mu_basecore
        scriptDir = self.sp
        rootDir = self.ws
        build_dir = os.path.join(rootDir, "Build")

        if (self.should_dump_version):
            print(
                "##vso[task.setvariable variable=NugetPackageVersion;isOutput=true]"
                + self.nuget_version)

        config_file = "SharedNetworking.config.json"

        if self.api_key is not None:
            logging.info("Will attempt to publish as well")
            params = "--Operation PackAndPush --ConfigFilePath {0} --Version {1} --InputFolderPath {2}  --ApiKey {3}".format(
                config_file, self.nuget_version, self.OUTPUT_DIR, self.api_key)
        else:
            params = "--Operation Pack --ConfigFilePath {0} --Version {1} --InputFolderPath {2} --OutputFolderPath {3}".format(
                config_file, self.nuget_version, self.OUTPUT_DIR, build_dir)
        # TODO: change this from a runcmd to directly invoking nuget publishing
        ret = RunCmd("nuget-publish",
                     params,
                     capture=True,
                     workingdir=scriptDir)
        if ret != 0:
            logging.error("Unable to pack/publish nuget package")
            return False
        logging.critical(
            "Finished packaging/publishing Nuget version {0}".format(
                self.nuget_version))
        return True
    def _GetReleaseForCommit(self, commit_hash: str, n: int = 100):
        if n > 2000:
            logging.error(
                "We couldn't find the release branch that we correspond to")
            return "0.0.0.0"
        git_dir = os.path.dirname(self.sp)
        cmd_args = ["log", '--format="%h %D"', "-n " + str(n)]
        return_buffer = StringIO()
        RunCmd("git",
               " ".join(cmd_args),
               workingdir=git_dir,
               outstream=return_buffer)
        return_buffer.seek(0)
        results = return_buffer.readlines()
        return_buffer.close()
        log_re = re.compile(r'(release|dev)/(\d{6})')
        for log_item in results:
            commit = log_item[:11]
            branch = log_item[11:].strip()
            if len(branch) == 0:
                continue
            match = log_re.search(branch)
            if match:
                logging.info("Basing our release commit off of commit " +
                             commit)
                return match.group(2)
        return self._GetReleaseForCommit(commit_hash, n * 2)
Example #16
def sign_in_place(sign_file_path, signature_options, signer_options):
    '''
    Alternate module-specific signing interface to support particular signatures associated
    with Windows capsule files (e.g. CAT files). Takes in the signature_options and signer_options
    dictionaries that are used by capsule_tool and capsule_helper.
    '''
    # NOTE: Currently, we only support the necessary algorithms for capsules.
    if signature_options['sign_alg'] != 'pkcs12':
        raise ValueError(
            f"Unsupported signature algorithm: {signature_options['sign_alg']}!"
        )
    if signature_options['hash_alg'] != 'sha256':
        raise ValueError(
            f"Unsupported hashing algorithm: {signature_options['hash_alg']}!")
    if 'key_file' not in signer_options:
        raise ValueError(
            "Must supply a key_file in signer_options for Signtool!")

    # Start building the parameters for the call.
    signtool_params = ['sign', '/a']
    signtool_params += ['/fd', signature_options['hash_alg']]
    signtool_params += ['/f', f"\"{signer_options['key_file']}\""]
    # if 'oid' in signer_options:
    #     signtool_params += ['/p7co', signer_options['oid']]
    # if 'eku' in signer_options:
    #     signtool_params += ['/u', signer_options['eku']]
    if 'key_pass' in signer_options:
        signtool_params += ['/p', signer_options['key_pass']]
    # Add basic options.
    signtool_params += ['/debug', '/v', f'"{sign_file_path}"']

    # Make the call to Signtool.
    ret = RunCmd(get_signtool_path(), " ".join(signtool_params))
    if ret != 0:
        raise RuntimeError(f"Signtool.exe returned with error: {ret}!")
    def Pack(self, version, OutputDirectory, ContentDir, RelNotesText=None):
        self.NewVersion = version

        # content must be absolute path in nuspec otherwise it is assumed
        # relative to nuspec file.
        cdir = os.path.abspath(ContentDir)

        # make nuspec file
        xmlstring = self._MakeNuspecXml(cdir, RelNotesText)
        nuspec = os.path.join(OutputDirectory, self.Name + ".nuspec")
        self.TempFileToDelete.append(nuspec)
        f = open(nuspec, "wb")
        f.write(xmlstring)
        f.close()

        # run nuget
        cmd = NugetDependency.GetNugetCmd()
        cmd += ["pack", nuspec]
        cmd += ["-OutputDirectory", '"' + OutputDirectory + '"']
        cmd += ["-Verbosity", "detailed"]
        # cmd += ["-NonInteractive"]
        ret = RunCmd(cmd[0], " ".join(cmd[1:]))

        if (ret != 0):
            logging.error("Failed on nuget commend.  RC = 0x%x" % ret)
            return ret

        self.NuPackageFile = os.path.join(
            OutputDirectory, self._GetNuPkgFileName(self.NewVersion))
        self.TempFileToDelete.append(self.NuPackageFile)
        return ret
    def clone_from(self,
                   url,
                   to_path,
                   branch=None,
                   shallow=False,
                   reference=None,
                   **kwargs):
        _logger = logging.getLogger("git.repo")
        _logger.debug("Cloning {0} into {1}".format(url, to_path))
        # make sure we get the commit if
        # use run command from utilities
        cmd = "git"
        params = ["clone"]
        if branch:
            shallow = True
            params.append(f'--branch {branch}')
            params.append('--single-branch')
        if shallow:
            # params.append("--shallow-submodules")
            params.append("--depth=5")
        if reference:
            params.append("--reference %s" % reference)
        else:
            params.append(
                "--recurse-submodules"
            )  # if we don't have a reference we can just recurse the submodules

        params.append(url)
        params.append(to_path)

        # Combine all the parameters together
        param_string = " ".join(params)

        ret = RunCmd(cmd, param_string)

        if ret != 0:
            logging.error("ERROR CLONING ")
            return None

        # if we have a reference path we must init the submodules
        if reference:
            params = ["submodule", "update", "--init", "--recursive"]
            params.append("--reference %s" % reference)
            param_string = " ".join(params)
            RunCmd(cmd, param_string)

        return Repo(to_path)
Example #19
    def fetch(self):
        package_name = self.name
        #
        # Before trying anything with Nuget feeds,
        # check to see whether the package is already in
        # our local cache. If it is, we avoid a lot of
        # time and network cost by copying it directly.
        #
        if self._fetch_from_cache(package_name):
            # We successfully found the package in the cache.
            # The published path may change now that the package has been unpacked.
            # Bail.
            self.published_path = self.compute_published_path()
            return

        #
        # If we are still here, the package wasn't in the cache.
        # We need to ask Nuget to find it.
        #

        #
        # First, fetch the contents of the package.
        #
        temp_directory = self.get_temp_dir()
        cmd = NugetDependency.GetNugetCmd()
        cmd += ["install", package_name]
        cmd += ["-Source", self.source]
        cmd += ["-ExcludeVersion"]
        cmd += ["-Version", self.version]
        cmd += ["-Verbosity", "detailed"]
        cmd += ["-OutputDirectory", '"' + temp_directory + '"']
        RunCmd(cmd[0], " ".join(cmd[1:]))

        #
        # Next, copy the contents of the package to the
        # final resting place.
        #
        # Depending on packaging, the package content will be in one of two
        # possible locations:
        # 1. temp_directory\package_name\package_name\
        # 2. temp_directory\package_name\
        #
        source_dir = os.path.join(temp_directory, package_name, package_name)
        if not os.path.isdir(source_dir):
            source_dir = os.path.join(temp_directory, package_name)
        shutil.move(source_dir, self.contents_dir)

        #
        # Add a file to track the state of the dependency.
        #
        self.update_state_file()

        #
        # Finally, delete the temp directory.
        #
        self._clean_directory(temp_directory)

        # The published path may change now that the package has been unpacked.
        self.published_path = self.compute_published_path()
Example #20
    def _get_url(self, remote="origin"):
        return_buffer = StringIO()
        params = "config --get remote.{0}.url".format(remote)
        RunCmd("git", params, workingdir=self._path, outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        return_buffer.close()
        return p1
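This helper and the ones that follow share a single pattern: run a read-only git query, capture stdout in a StringIO buffer, and return the stripped value. A standalone sketch of that pattern, assuming git and edk2-pytool-library are installed (the function name is illustrative):

from io import StringIO

from edk2toollib.utility_functions import RunCmd


def git_query(repo_path: str, args: str) -> str:
    # Sketch only: run a read-only git command and return its stripped stdout.
    buffer = StringIO()
    RunCmd("git", args, workingdir=repo_path, outstream=buffer)
    value = buffer.getvalue().strip()
    buffer.close()
    return value

# Example usage: git_query(".", "config --get remote.origin.url")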
Example #21
    def _check_spelling(self, abs_file_to_check: str, abs_config_file_to_use: str) -> List[str]:
        output = StringIO()
        ret = RunCmd(
            "cspell", f"--config {abs_config_file_to_use} {abs_file_to_check}", outstream=output)
        if ret == 0:
            return []
        else:
            return output.getvalue().strip().splitlines()
Example #22
    def _get_branch(self):
        return_buffer = StringIO()
        params = "rev-parse --abbrev-ref HEAD"
        RunCmd("git", params, workingdir=self._path, outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        return_buffer.close()
        return p1
    def Build(self):
        BuildType = self.env.GetValue("TARGET")
        edk2_logging.log_progress("Running Build %s" % BuildType)

        # set target, arch, toolchain, threads, and platform
        params = "-p " + self.env.GetValue("ACTIVE_PLATFORM")
        params += " -b " + BuildType
        params += " -t " + self.env.GetValue("TOOL_CHAIN_TAG")
        # Thread number is now optional and not set in default tianocore target.txt
        if self.env.GetValue("MAX_CONCURRENT_THREAD_NUMBER") is not None:
            params += " -n " + self.env.GetValue(
                "MAX_CONCURRENT_THREAD_NUMBER")

        # Set the arch flags.  Multiple are split by space
        rt = self.env.GetValue("TARGET_ARCH").split(" ")
        for t in rt:
            params += " -a " + t

        # get the report options and setup the build command
        if (self.env.GetValue("BUILDREPORTING") == "TRUE"):
            params += " -y " + self.env.GetValue("BUILDREPORT_FILE")
            rt = self.env.GetValue("BUILDREPORT_TYPES").split(" ")
            for t in rt:
                params += " -Y " + t

        # add special processing to handle building a single module
        mod = self.env.GetValue("BUILDMODULE")
        if (mod is not None and len(mod.strip()) > 0):
            params += " -m " + mod
            edk2_logging.log_progress("Single Module Build: " + mod)
            self.SkipPostBuild = True
            self.FlashImage = False

        # attach the generic build vars
        buildvars = self.env.GetAllBuildKeyValues(BuildType)
        for key, value in buildvars.items():
            params += " -D " + key + "=" + value
        output_stream = edk2_logging.create_output_stream()

        env = shell_environment.ShellEnvironment()
        # WORKAROUND - Pin the PYTHONHASHSEED so that TianoCore build tools
        #               have consistent ordering. Addresses incremental builds.
        pre_build_env_chk = env.checkpoint()
        env.set_shell_var('PYTHONHASHSEED', '0')
        env.log_environment()
        ret = RunCmd("build", params)
        # WORKAROUND - Undo the workaround.
        env.restore_checkpoint(pre_build_env_chk)

        problems = edk2_logging.scan_compiler_output(output_stream)
        edk2_logging.remove_output_stream(output_stream)
        for level, problem in problems:
            logging.log(level, problem)

        if (ret != 0):
            return ret

        return 0
    def RunBuildPlugin(self,
                       packagename,
                       Edk2pathObj,
                       pkgconfig,
                       environment,
                       PLM,
                       PLMHelper,
                       tc,
                       output_stream=None):
        return_buffer = StringIO()
        params = "diff --unified=0 origin/master HEAD"
        RunCmd("git", params, outstream=return_buffer)
        p = return_buffer.getvalue().strip()
        patch = p.split("\n")
        return_buffer.close()

        ignore_files = []
        if "IgnoreFiles" in pkgconfig:
            ignore_files = pkgconfig["IgnoreFiles"]

        self.ok = True
        self.startcheck = False
        self.license = True
        self.all_file_pass = True
        count = len(patch)
        line_index = 0
        for line in patch:
            if line.startswith('--- /dev/null'):
                nextline = patch[line_index + 1]
                added_file = self.Readdedfileformat.search(nextline).group(1)
                added_file_extension = os.path.splitext(added_file)[1]
                if added_file_extension in self.file_extension_list and packagename in added_file:
                    if (self.IsIgnoreFile(added_file, ignore_files)):
                        line_index = line_index + 1
                        continue
                    self.startcheck = True
                    self.license = False
            if self.startcheck and self.license_format_preflix in line:
                if self.bsd2_patent in line:
                    self.license = True
            if line_index + 1 == count or patch[line_index + 1].startswith(
                    'diff --') and self.startcheck:
                if not self.license:
                    self.all_file_pass = False
                    error_message = "Invalid license in: " + added_file + " Hint: Only BSD-2-Clause-Patent is accepted."
                    logging.error(error_message)
                self.startcheck = False
                self.license = True
            line_index = line_index + 1

        if self.all_file_pass:
            tc.SetSuccess()
            return 0
        else:
            tc.SetFailed("License Check {0} Failed. ".format(packagename),
                         "LICENSE_CHECK_FAILED")
            return 1
Example #25
    def GetDiff(self, pkg: str) -> List[str]:
        return_buffer = StringIO()
        params = "diff --unified=0 origin/master HEAD"
        RunCmd("git", params, outstream=return_buffer)
        p = return_buffer.getvalue().strip()
        patch = p.split("\n")
        return_buffer.close()

        return patch
Example #26
    def _execute_uncrustify(self) -> None:
        """
        Executes Uncrustify with the initialized configuration.
        """
        output = StringIO()
        self._app_exit_code = RunCmd(
            self._app_path,
            f"-c {self._app_config_file} -F {self._app_input_file_path} --if-changed --suffix {UncrustifyCheck.FORMATTED_FILE_EXTENSION}", outstream=output)
        self._app_output = output.getvalue().strip().splitlines()
Example #27
    def FlashRomImage(self):
        VirtualDrive = os.path.join(self.env.GetValue("BUILD_OUTPUT_BASE"),
                                    "VirtualDrive")
        os.makedirs(VirtualDrive, exist_ok=True)
        OutputPath_FV = os.path.join(self.env.GetValue("BUILD_OUTPUT_BASE"),
                                     "FV")
        Built_FV = os.path.join(OutputPath_FV, "QEMU_EFI.fd")

        # pad fd to 64mb
        with open(Built_FV, "ab") as fvfile:
            fvfile.seek(0, os.SEEK_END)
            additional = b'\0' * ((64 * 1024 * 1024) - fvfile.tell())
            fvfile.write(additional)

        # QEMU must be on that path

        # Unique Command and Args parameters per ARCH
        if (self.env.GetValue("TARGET_ARCH").upper() == "AARCH64"):
            cmd = "qemu-system-aarch64"
            args = "-M virt"
            args += " -cpu cortex-a57"  # emulate cpu
        elif (self.env.GetValue("TARGET_ARCH").upper() == "ARM"):
            cmd = "qemu-system-arm"
            args = "-M virt"
            args += " -cpu cortex-a15"  # emulate cpu
        else:
            raise NotImplementedError()

        # Common Args
        args += " -pflash " + Built_FV  # path to fw
        args += " -m 1024"  # 1gb memory
        # turn off network
        args += " -net none"
        # Serial messages out
        args += " -serial stdio"
        # Mount disk with startup.nsh
        args += f" -drive file=fat:rw:{VirtualDrive},format=raw,media=disk"

        # Conditional Args
        if (self.env.GetValue("QEMU_HEADLESS").upper() == "TRUE"):
            args += " -display none"  # no graphics

        if (self.env.GetValue("MAKE_STARTUP_NSH").upper() == "TRUE"):
            f = open(os.path.join(VirtualDrive, "startup.nsh"), "w")
            f.write("BOOT SUCCESS !!! \n")
            # add commands here
            f.write("reset -s\n")
            f.close()

        ret = RunCmd(cmd, args)

        if ret == 0xc0000005:
            # for some reason getting a c0000005 on successful return
            return 0

        return ret
    def _get_clang_version(self, clang_bin_path):
        return_buffer = StringIO()
        ret = RunCmd(os.path.join(clang_bin_path, "clang"),
                     "--version",
                     outstream=return_buffer)
        if (ret != 0):
            logging.warning("Failed to find version of clang")
            return -1
        line = return_buffer.getvalue().splitlines()[0].strip()
        # First line is expected to look like "clang version X.Y.Z ...";
        # skip the 14-character "clang version " prefix to get the version string.
        return line[14:].strip()
Example #29
    def GetDiff(self, pkg: str, temp_diff_output: str) -> List[str]:
        patch = []
        #
        # Generate unified diff between origin/master and HEAD.
        #
        params = "diff --output={} --unified=0 origin/master HEAD".format(
            temp_diff_output)
        RunCmd("git", params)
        with open(temp_diff_output) as file:
            patch = file.read().strip().split('\n')
        return patch
Example #30
    def _get_bare(self):
        return_buffer = StringIO()
        params = "rev-parse --is-bare-repository"
        RunCmd("git", params, workingdir=self._path, outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        return_buffer.close()
        if p1.lower() == "true":
            return True
        else:
            return False