    def Push(self, nuPackage, apikey):
        if(not os.path.isfile(nuPackage)):
            raise Exception("Invalid file path for NuPkg file")
        logging.debug("Pushing %s file to server %s" % (nuPackage, self.ConfigData["server_url"]))

        cmd = NugetDependency.GetNugetCmd()
        cmd += ["push", nuPackage]
        cmd += ["-Verbosity", "detailed"]
        # cmd += ["-NonInteractive"]
        cmd += ["-Source", self.ConfigData["server_url"]]
        cmd += ["-ApiKey", apikey]
        output_buffer = StringIO()
        ret = RunCmd(cmd[0], " ".join(cmd[1:]), outstream=output_buffer)

        if(ret != 0):
            # Rewind the buffer and capture the contents.
            output_buffer.seek(0)
            output_contents = output_buffer.read()

            # Check for the API message.
            if "API key is invalid".lower() in output_contents.lower():
                logging.critical("API key is invalid. Please use --ApiKey to provide a valid key.")

            # Generic error.
            logging.error("Failed on nuget command.  RC = 0x%x" % ret)

        return ret
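Nearly every example on this page follows the same pattern: build an argument string, hand it to RunCmd, and optionally capture stdout in a StringIO buffer. A minimal sketch of that pattern, assuming RunCmd comes from the MuPythonLibrary utility module (the exact import path is an assumption and varies between MuPythonLibrary and the newer edk2toollib packaging):

import logging
from io import StringIO
from MuPythonLibrary.UtilityFunctions import RunCmd  # assumed import path; newer trees use edk2toollib.utility_functions

output_buffer = StringIO()
ret = RunCmd("git", "--version", outstream=output_buffer)  # run the command and capture stdout
if ret != 0:
    logging.error("Command failed.  RC = 0x%x" % ret)
else:
    logging.info("Captured output: %s" % output_buffer.getvalue().strip())
output_buffer.close()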
Example #2
def PackageMsFmpHeader(InputBin,
                       OutputBin,
                       VersionInt,
                       LsvInt,
                       DepList=[]):
    logging.debug("CapsulePackage: Fmp Header")
    params = "-o " + OutputBin
    params = params + " --version " + hex(VersionInt).rstrip("L")
    params = params + " --lsv " + hex(LsvInt)
    params = params + " -p " + InputBin + " -v"
    # append dependency arguments if supplied
    for dep in DepList:
        depGuid = dep[0]
        depIndex = int(dep[1])
        depMinVer = hex(dep[2])
        depFlag = hex(dep[3])
        logging.debug(
            "Adding a Dependency:\n\tFMP Guid: %s \n\tFmp Descriptor Index: %d \n\tFmp DepVersion: %s \n\tFmp Flags: %s\n"
            % (depGuid, depIndex, depMinVer, depFlag))
        params += " --dep " + depGuid + " " + str(
            depIndex) + " " + depMinVer + " " + depFlag
    ret = RunCmd("genmspayloadheader.exe", params)
    if (ret != 0):
        raise Exception("GenMsPayloadHeader Failed with errorcode %d" % ret)
    return ret
    def MakeCat(self, OutputCatFile, PathToInf2CatTool=None):
        # Find Inf2Cat tool
        if (PathToInf2CatTool is None):
            PathToInf2CatTool = FindToolInWinSdk("Inf2Cat.exe")
        # check if exists
        if not os.path.exists(PathToInf2CatTool):
            raise Exception(
                "Can't find Inf2Cat on this machine.  Please install the Windows 10 WDK - "
                "https://developer.microsoft.com/en-us/windows/hardware/windows-driver-kit"
            )

        # Adjust for spaces in the path (when calling the command).
        if " " in PathToInf2CatTool:
            PathToInf2CatTool = '"' + PathToInf2CatTool + '"'

        OutputFolder = os.path.dirname(OutputCatFile)
        # Make Cat file
        cmd = "/driver:. /os:" + self.OperatingSystem + "_" + self.Arch + " /verbose"
        ret = RunCmd(PathToInf2CatTool, cmd, workingdir=OutputFolder)
        if (ret != 0):
            raise Exception("Creating Cat file Failed with errorcode %d" % ret)
        if (not os.path.isfile(OutputCatFile)):
            raise Exception("CAT file (%s) not created" % OutputCatFile)

        return 0
Example #4
def GetCommitHashes(root_dir: os.PathLike):
    # Recursively looks at every .git and gets the commit from there
    search_path = os.path.join(root_dir, "**", ".git")
    search = glob.iglob(search_path, recursive=True)
    found_repos = {}
    cmd_args = "rev-parse HEAD"
    for git_path in search:
        git_path_dir = os.path.dirname(git_path)
        if git_path_dir == root_dir:
            git_repo_name = "MU_PLUS"
        else:
            _, git_repo_name = os.path.split(git_path_dir)
        git_repo_name = git_repo_name.upper()
        if git_repo_name in found_repos:
            raise RuntimeError("we've already found this repo before " +
                               git_repo_name)
        # read the git hash for this repo
        return_buffer = StringIO()
        RunCmd("git",
               cmd_args,
               workingdir=git_path_dir,
               outstream=return_buffer)
        commit_hash = return_buffer.getvalue().strip()
        return_buffer.close()
        found_repos[git_repo_name] = commit_hash
    return found_repos
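A brief usage sketch for GetCommitHashes above; the workspace path is a hypothetical placeholder, and the function returns a dict mapping upper-cased repo names to commit hashes:

import logging

workspace_root = r"C:\src\mu_workspace"  # hypothetical workspace root
for repo_name, commit_hash in GetCommitHashes(workspace_root).items():
    logging.info("%s is at commit %s", repo_name, commit_hash)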
    def Pack(self, version, OutputDirectory, ContentDir, RelNotesText=None):
        self.NewVersion = version

        # content must be absolute path in nuspec otherwise it is assumed
        # relative to nuspec file.
        cdir = os.path.abspath(ContentDir)

        # make nuspec file
        xmlstring = self._MakeNuspecXml(cdir, RelNotesText)
        nuspec = os.path.join(OutputDirectory, self.Name + ".nuspec")
        self.TempFileToDelete.append(nuspec)
        f = open(nuspec, "wb")
        f.write(xmlstring)
        f.close()

        # run nuget
        cmd = NugetDependency.GetNugetCmd()
        cmd += ["pack", nuspec]
        cmd += ["-OutputDirectory", '"' + OutputDirectory + '"']
        cmd += ["-Verbosity", "detailed"]
        # cmd += ["-NonInteractive"]
        ret = RunCmd(cmd[0], " ".join(cmd[1:]))

        if(ret != 0):
            logging.error("Failed on nuget command.  RC = 0x%x" % ret)
            return ret

        self.NuPackageFile = os.path.join(OutputDirectory, self._GetNuPkgFileName(self.NewVersion))
        self.TempFileToDelete.append(self.NuPackageFile)
        return ret
    def MakeCat(self, OutputCatFile, PathToInf2CatTool=None):
        # Find Inf2Cat tool
        if (PathToInf2CatTool is None):
            PathToInf2CatTool = os.path.join(os.getenv("ProgramFiles(x86)"),
                                             "Windows Kits", "10", "bin",
                                             "x86", "Inf2Cat.exe")
            if not os.path.exists(PathToInf2CatTool):
                logging.debug("Windows Kit 10 not Found....trying 8.1")
                # Try 8.1 kit
                PathToInf2CatTool = PathToInf2CatTool.replace("10", "8.1")

        # check if exists
        if not os.path.exists(PathToInf2CatTool):
            raise Exception(
                "Can't find Inf2Cat on this machine.  Please install the Windows 10 WDK - "
                "https://developer.microsoft.com/en-us/windows/hardware/windows-driver-kit"
            )

        # Adjust for spaces in the path (when calling the command).
        if " " in PathToInf2CatTool:
            PathToInf2CatTool = '"' + PathToInf2CatTool + '"'

        OutputFolder = os.path.dirname(OutputCatFile)
        # Make Cat file
        cmd = "/driver:. /os:" + self.OperatingSystem + "_" + self.Arch + " /verbose"
        ret = RunCmd(PathToInf2CatTool, cmd, workingdir=OutputFolder)
        if (ret != 0):
            raise Exception("Creating Cat file Failed with errorcode %d" % ret)
        if (not os.path.isfile(OutputCatFile)):
            raise Exception("CAT file (%s) not created" % OutputCatFile)

        return 0
Example #7
    def PlatformPostBuild(self):
        rc = 0
        os.environ["CMOCKA_MESSAGE_OUTPUT"] = self.env.GetValue("TEST_OUTPUT_FORMAT", "xml")
        logging.log(MuLogging.get_section_level(), "Run Host based Unit Tests")
        path = self.env.GetValue("BUILD_OUTPUT_BASE")
        for arch in self.env.GetValue("TARGET_ARCH").split():
            logging.log(MuLogging.get_subsection_level(), "Testing for architecture: " + arch)
            cp = os.path.join(path, arch)
            for old_result in glob.iglob(os.path.join(cp, "*.result.xml")):
                os.remove(old_result)
            testList = glob.glob(os.path.join(cp, "*Test*.exe"))
            for test in testList:
                os.environ["CMOCKA_XML_FILE"] = test + ".%g." + arch + ".result.xml"
                ret = RunCmd('"' + test + '"', "", workingdir=cp)
                if(ret != 0):
                    logging.error("UnitTest Execution Error: " + os.path.basename(test))
                    rc = ret
                else:
                    logging.info("UnitTest Completed: " + os.path.basename(test))
                    file_match_pattern = test + ".*." + arch + ".result.xml"
                    xml_results_list = glob.glob(file_match_pattern)
                    for xml_result_file in xml_results_list:
                        root = xml.etree.ElementTree.parse(xml_result_file).getroot()
                        for suite in root:
                            for case in suite:
                                for result in case:
                                    if result.tag == 'failure':
                                        logging.warning("%s Test Failed" % os.path.basename(test))
                                        logging.warning("  %s - %s" % (case.attrib['name'], result.text))

        return rc
Example #8
    def fetch(self):
        package_name = self.name
        #
        # Before trying anything with Nuget feeds,
        # check to see whether the package is already in
        # our local cache. If it is, we avoid a lot of
        # time and network cost by copying it directly.
        #
        if self._fetch_from_cache(package_name):
            # We successfully found the package in the cache.
            # The published path may change now that the package has been unpacked.
            # Bail.
            self.published_path = self.compute_published_path()
            return

        #
        # If we are still here, the package wasn't in the cache.
        # We need to ask Nuget to find it.
        #

        #
        # First, fetch the contents of the package.
        #
        temp_directory = self.get_temp_dir()
        cmd = NugetDependency.GetNugetCmd()
        cmd += ["install", package_name]
        cmd += ["-Source", self.source]
        cmd += ["-ExcludeVersion"]
        cmd += ["-Version", self.version]
        cmd += ["-Verbosity", "detailed"]
        cmd += ["-OutputDirectory", '"' + temp_directory + '"']
        RunCmd(cmd[0], " ".join(cmd[1:]))

        #
        # Next, copy the contents of the package to the
        # final resting place.
        #
        # Depending on packaging, the package content will be in one of two
        # possible locations:
        # 1. temp_directory\package_name\package_name\
        # 2. temp_directory\package_name\
        #
        source_dir = os.path.join(temp_directory, package_name, package_name)
        if not os.path.isdir(source_dir):
            source_dir = os.path.join(temp_directory, package_name)
        shutil.move(source_dir, self.contents_dir)

        #
        # Add a file to track the state of the dependency.
        #
        self.update_state_file()

        #
        # Finally, delete the temp directory.
        #
        self._clean_directory(temp_directory)

        # The published path may change now that the package has been unpacked.
        self.published_path = self.compute_published_path()
Example #9
def PackageFmpCapsuleHeader(InputBin, OutputBin, FmpGuid):
    logging.debug("CapsulePackage: Fmp Capsule Header")
    params = "-o " + OutputBin
    params = params + " -p " + InputBin + " " + FmpGuid + " 1 0 -V"
    ret = RunCmd("genfmpcap.exe", params)
    if (ret != 0):
        raise Exception("GenFmpCap Failed with errorcode %d" % ret)
    return ret
Example #10
    def _get_branch(self):
        return_buffer = StringIO()
        params = "rev-parse --abbrev-ref HEAD"
        RunCmd("git", params, workingdir=self._path, outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        return_buffer.close()
        return p1
Example #11
    def _get_url(self, remote="origin"):
        return_buffer = StringIO()
        params = "config --get remote.{0}.url".format(remote)
        RunCmd("git", params, workingdir=self._path, outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        return_buffer.close()
        return p1
Example #12
    def clone_from(self,
                   url,
                   to_path,
                   progress=None,
                   env=None,
                   shallow=False,
                   reference=None,
                   **kwargs):
        _logger = logging.getLogger("git.repo")
        _logger.debug("Cloning {0} into {1}".format(url, to_path))
        # use RunCmd from the utility functions to drive git directly
        cmd = "git"
        params = ["clone"]
        if shallow:
            params.append("--shallow-submodules")
        if reference:
            params.append("--reference %s" % reference)
        else:
            params.append(
                "--recurse-submodules"
            )  # if we don't have a reference we can just recurse the submodules
        params.append(url)
        params.append(to_path)

        # Combine all the parameters together
        param_string = " ".join(params)

        ret = RunCmd(cmd, param_string)

        if ret != 0:
            logging.error("ERROR CLONING ")
            return None

        # if we have a reference path we must init the submodules
        if reference:
            params = ["submodule", "update", "--init", "--recursive"]
            params.append("--reference %s" % reference)
            param_string = " ".join(params)
            ret = RunCmd(cmd, param_string)

        return Repo(to_path)
Example #13
    def _get_bare(self):
        return_buffer = StringIO()
        params = "rev-parse --is-bare-repository"
        RunCmd("git", params, workingdir=self._path, outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        return_buffer.close()
        if p1.lower() == "true":
            return True
        else:
            return False
Example #14
    def _get_head(self):
        return_buffer = StringIO()
        params = "rev-parse HEAD"
        RunCmd("git", params, workingdir=self._path, outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        return_buffer.close()

        head = ObjectDict()
        head.set("commit", p1)

        return head
Example #15
    def _get_submodule_list(self):
        submodule_list = []
        return_buffer = StringIO()
        params = "config --file .gitmodules --get-regexp path"
        RunCmd("git", params, workingdir=self._path, outstream=return_buffer)
        p1 = return_buffer.getvalue().strip()
        return_buffer.close()
        if (len(p1) > 0):
            submodule_list = p1.split("\n")
            for i in range(0, len(submodule_list)):
                submodule_list[i] = submodule_list[i].split(' ')[1]
        return submodule_list
Example #16
def GetReleaseNote():
    cmd = "log --format=%B -n 1 HEAD"
    return_buffer = StringIO()
    if (RunCmd("git", cmd, outstream=return_buffer) == 0):
        # Seek to the beginning of the output buffer and capture the output.
        return_buffer.seek(0)
        return_string = return_buffer.read(155).replace(
            "\n", " ")  # read the first 155 characters and replace newlines with spaces
        return_buffer.close()
        # TODO: figure out if there was more input and append a ... if needed
        return return_string.strip()
    else:
        raise RuntimeError("Unable to read release notes")
Example #17
    def _get_remotes(self):
        return_buffer = StringIO()
        params = "remote"
        new_remotes = ObjectDict()
        RunCmd("git", params, workingdir=self._path, outstream=return_buffer)
        p1 = return_buffer.getvalue().strip()
        return_buffer.close()
        remote_list = p1.split("\n")
        for remote in remote_list:
            url = ObjectDict()
            url.set("url", self._get_url(remote))
            setattr(new_remotes, remote, url)

        return new_remotes
Example #18
    def PackageCapsuleHeader(InputBin, OutputBin, FmpDeviceGuid=None):
        logging.debug("CapsulePackage: Final Capsule Header")
        if(FmpDeviceGuid is None):
            logging.debug("CapsulePackage: Using default industry standard FMP guid")
            FmpDeviceGuid = "6dcbd5ed-e82d-4c44-bda1-7194199ad92a"

        params = "-o " + OutputBin
        params = params + " -g " + FmpDeviceGuid
        params = params + " --capsule -v -f " + InputBin
        params = params + " --capFlag PersistAcrossReset --capFlag InitiateReset"
        ret = RunCmd("genfv", params)
        if(ret != 0):
            raise Exception("GenFv Failed with errorcode %d" % ret)
        return ret
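The capsule helpers shown above (PackageMsFmpHeader, PackageFmpCapsuleHeader, PackageCapsuleHeader) appear designed to be chained, each wrapping the previous output. A hedged sketch of one possible ordering, assuming they are callable as plain functions; every file name, GUID, and version value here is a hypothetical placeholder:

# Hypothetical chaining of the capsule packaging helpers above.
payload = "FmpPayload.bin"                        # hypothetical raw payload
PackageMsFmpHeader(payload, "FmpPayload.fmp",
                   VersionInt=0x00010002, LsvInt=0x00010000)
PackageFmpCapsuleHeader("FmpPayload.fmp", "FmpPayload.cap",
                        "11111111-2222-3333-4444-555555555555")  # hypothetical FMP GUID
PackageCapsuleHeader("FmpPayload.cap", "FinalCapsule.cap")  # uses the default FMP device GUID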
Example #19
def GetLatestNugetVersion(package_name, source=None):
    cmd = NugetDependency.GetNugetCmd()
    cmd += ["list"]
    cmd += [package_name]
    if source is not None:
        cmd += ["-Source", source]
    return_buffer = StringIO()
    if (RunCmd(cmd[0], " ".join(cmd[1:]), outstream=return_buffer) == 0):
        # Seek to the beginning of the output buffer and capture the output.
        return_buffer.seek(0)
        return_string = return_buffer.read()
        return_buffer.close()
        return return_string.strip().strip(package_name).strip()
    else:
        return "0.0.0.0"
Example #20
    def _get_dirty(self):
        return_buffer = StringIO()
        params = "status --short"

        RunCmd("git", params, workingdir=self._path, outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        return_buffer.close()

        if len(p1) > 0:
            return True

        return_buffer = StringIO()
        params = "log --branches --not --remotes --decorate --oneline"

        RunCmd("git", params, workingdir=self._path, outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        return_buffer.close()

        if len(p1) > 0:
            return True

        return False
Example #21
    def pull(self):
        return_buffer = StringIO()

        params = "pull"

        ret = RunCmd("git",
                     params,
                     workingdir=self._path,
                     outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        if ret != 0:
            self._logger.error(p1)
            return False

        return True
Example #22
def DownloadNugetPackageVersion(package_name: str,
                                version: str,
                                destination: os.PathLike,
                                source=None):
    cmd = NugetDependency.GetNugetCmd()
    cmd += ["install", package_name]
    if source is not None:
        cmd += ["-Source", source]
    cmd += ["-ExcludeVersion"]
    cmd += ["-Version", version]
    cmd += ["-Verbosity", "detailed"]
    cmd += ["-OutputDirectory", '"' + destination + '"']
    ret = RunCmd(cmd[0], " ".join(cmd[1:]))
    if ret != 0:
        return False
    else:
        return True
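A usage sketch combining GetLatestNugetVersion and DownloadNugetPackageVersion above; the package name, feed URL, and destination directory are hypothetical placeholders:

import logging

package = "SomeToolPackage"                       # hypothetical package name
feed = "https://api.nuget.org/v3/index.json"      # hypothetical feed URL
latest = GetLatestNugetVersion(package, source=feed)
if not DownloadNugetPackageVersion(package, latest, r"C:\Temp\nuget", source=feed):
    logging.error("Failed to download %s version %s", package, latest)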
Example #23
    def checkout(self, branch=None, commit=None):
        return_buffer = StringIO()
        if branch is not None:
            params = "checkout %s" % branch
        elif commit is not None:
            params = "checkout %s" % commit
        else:
            raise ValueError("checkout requires either a branch or a commit")
        ret = RunCmd("git",
                     params,
                     workingdir=self._path,
                     outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        if ret != 0:
            self._logger.debug(p1)
            return False

        return True
Example #24
    def submodule(self, command, *args):
        self._logger.debug("Calling command on submodule {0} with {1}".format(
            command, args))
        return_buffer = StringIO()
        flags = " ".join(args)
        params = "submodule {0} {1}".format(command, flags)

        ret = RunCmd("git",
                     params,
                     workingdir=self._path,
                     outstream=return_buffer)

        p1 = return_buffer.getvalue().strip()
        if ret != 0:
            self._logger.error(p1)
            return False

        return True
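The git helpers in the surrounding examples (_get_branch, _get_dirty, checkout, pull, submodule, and friends) read like methods of the Repo wrapper returned by clone_from. A hedged sketch of how such an object might be used, assuming a Repo-style class whose constructor stores the working path in self._path:

# Hedged usage sketch: assumes a Repo-style wrapper class that stores the
# repository path in self._path. The class name, path, and branch are assumptions.
repo = Repo(r"C:\src\mu_workspace")              # hypothetical wrapper and path
print("branch:", repo._get_branch())
print("dirty: ", repo._get_dirty())
if repo.checkout(branch="release/202002"):       # hypothetical branch name
    repo.pull()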
def FindWithVsWhere(products: str = "*"):
    cmd = "-latest -nologo -all -property installationPath"
    vs_where_path = GetVsWherePath()
    if vs_where_path is None:
        logging.warning("We weren't able to find VSWhere")
        return (1, None)
    if (products is not None):
        cmd += " -products " + products
    a = StringIO()
    ret = RunCmd(vs_where_path, cmd, outstream=a)
    if (ret != 0):
        a.close()
        return (ret, None)
    p1 = a.getvalue().strip()
    a.close()
    if (len(p1.strip()) > 0):
        return (0, p1)
    return (ret, None)
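FindWithVsWhere returns a (return code, output) tuple, where the output holds the vswhere installation path(s), one per line. A minimal usage sketch:

(rc, vs_paths) = FindWithVsWhere()
if rc == 0 and vs_paths is not None:
    print("Visual Studio installation path(s):\n" + vs_paths)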
Example #26
    def _fetch_from_cache(self, package_name):
        result = False

        #
        # We still need to use Nuget to figure out where the
        # "global-packages" cache is on this machine.
        #
        if NugetDependency.global_cache_path is None:
            cmd = NugetDependency.GetNugetCmd()
            cmd += ["locals", "global-packages", "-list"]
            return_buffer = StringIO()
            if (RunCmd(cmd[0], " ".join(cmd[1:]),
                       outstream=return_buffer) == 0):
                # Seek to the beginning of the output buffer and capture the output.
                return_buffer.seek(0)
                return_string = return_buffer.read()
                NugetDependency.global_cache_path = return_string.strip(
                ).strip("global-packages: ")

        #
        # If the path couldn't be found, we can't do anything else.
        #
        if NugetDependency.global_cache_path is None or \
                not os.path.isdir(NugetDependency.global_cache_path):
            logging.info(
                "Could not determine Nuget global packages cache location.")
            return False

        #
        # Now, try to locate our actual cache path
        nuget_version = NugetDependency.normalize_version(self.version)
        cache_search_path = os.path.join(NugetDependency.global_cache_path,
                                         package_name.lower(), nuget_version,
                                         package_name)
        if os.path.isdir(cache_search_path):
            logging.info(
                "Local Cache found for Nuget package '%s'. Skipping fetch.",
                package_name)
            shutil.copytree(cache_search_path, self.contents_dir)
            self.update_state_file()
            result = True

        return result
Example #27
def GetReleaseForCommit(commit_hash: str):
    git_dir = os.path.dirname(SCRIPT_PATH)
    cmd_args = ["log", '--format="%h %D"', "--all", "-n 100"]
    return_buffer = StringIO()
    RunCmd("git",
           " ".join(cmd_args),
           workingdir=git_dir,
           outstream=return_buffer)
    return_buffer.seek(0)
    results = return_buffer.readlines()
    return_buffer.close()
    log_re = re.compile(r'release/(\d{6})')
    for log_item in results:
        commit = log_item[:11]
        branch = log_item[11:].strip()
        if len(branch) == 0:
            continue
        match = log_re.search(branch)
        if match:
            return match.group(1)

    raise RuntimeError(
        "We couldn't find the release branch that we correspond to")
def minimum_env_init(my_workspace_path, my_project_scope):
    # TODO: Check the Git version against minimums.

    # Check the Python version against minimums.
    cur_py = "%d.%d.%d" % sys.version_info[:3]
    VersionAggregator.GetVersionAggregator().ReportVersion(
        "Python", cur_py, VersionAggregator.VersionTypes.TOOL)

    soft_min_py = "3.7"
    hard_min_py = "3.6"

    if version_compare(hard_min_py, cur_py) > 0:
        raise RuntimeError(
            "Please upgrade Python! Current version is %s. Minimum is %s." %
            (cur_py, hard_min_py))
    if version_compare(soft_min_py, cur_py) > 0:
        logging.error(
            "Please upgrade Python! Current version is %s. Recommended minimum is %s."
            % (cur_py, soft_min_py))

    return_buffer = StringIO()
    RunCmd("git", "--version", outstream=return_buffer)
    git_version = return_buffer.getvalue().strip()
    return_buffer.close()
    VersionAggregator.GetVersionAggregator().ReportVersion(
        "Git", git_version, VersionAggregator.VersionTypes.TOOL)
    min_git = "2.11.0"
    # This code is highly specific to the return value of "git version"...
    cur_git = ".".join(git_version.split(' ')[2].split(".")[:3])
    if version_compare(min_git, cur_git) > 0:
        raise RuntimeError(
            "Please upgrade Git! Current version is %s. Minimum is %s." %
            (cur_git, min_git))

    # Initialize the build environment.
    return SelfDescribingEnvironment.BootstrapEnvironment(
        my_workspace_path, my_project_scope)
def setup_process(my_workspace_path,
                  my_project_scope,
                  my_required_repos,
                  force_it=False,
                  cache_path=None):
    def log_lines(level, lines):
        for line in lines.split("\n"):
            if line != "":
                logging.log(level, line)

    # Pre-setup cleaning if "--force" is specified.
    if force_it:
        try:
            # Clean and reset the main repo.
            MuLogging.log_progress("## Cleaning the root repo...")
            cmd_with_output('git reset --hard', my_workspace_path)
            log_lines(logging.INFO,
                      cmd_with_output('git clean -xffd', my_workspace_path))
            MuLogging.log_progress("Done.\n")

            # Clean any submodule repos.
            if my_required_repos:
                for required_repo in my_required_repos:
                    MuLogging.log_progress(
                        "## Cleaning Git repository: %s..." % required_repo)
                    required_repo_path = os.path.normpath(
                        os.path.join(my_workspace_path, required_repo))
                    cmd_with_output('git reset --hard', required_repo_path)
                    log_lines(
                        logging.INFO,
                        cmd_with_output('git clean -xffd', required_repo_path))
                    MuLogging.log_progress("Done.\n")

        except RuntimeError as e:
            logging.error("FAILED!\n")
            logging.error("Error while trying to clean the environment!")
            log_lines(logging.ERROR, str(e))
            return

    # Grab the remaining Git repos.
    if my_required_repos:
        # Git Repos: STEP 1 --------------------------------------
        # Make sure that the repos are all synced.
        try:
            MuLogging.log_progress("## Syncing Git repositories: %s..." %
                                   ", ".join(my_required_repos))
            cmd_with_output(
                'git submodule sync -- ' + " ".join(my_required_repos),
                my_workspace_path)
            MuLogging.log_progress("Done.\n")
        except RuntimeError as e:
            logging.error("FAILED!\n")
            logging.error("Error while trying to synchronize the environment!")
            log_lines(logging.ERROR, str(e))
            return

        # Git Repos: STEP 2 --------------------------------------
        # Iterate through all repos and see whether they should be fetched.
        for required_repo in my_required_repos:
            try:
                MuLogging.log_progress("## Checking Git repository: %s..." %
                                       required_repo)

                # Git Repos: STEP 2a ---------------------------------
                # Need to determine whether to skip this repo.
                required_repo_path = os.path.normpath(
                    os.path.join(my_workspace_path, required_repo))
                skip_repo = False
                # If the repo exists (and we're not forcing things) make
                # sure that it's not in a "dirty" state.
                if os.path.exists(required_repo_path) and not force_it:
                    git_data = cmd_with_output('git diff ' + required_repo,
                                               my_workspace_path)

                    # If anything was returned, we should skip processing the repo.
                    # It is either on a different commit or it has local changes.
                    if git_data != "":
                        logging.info(
                            "-- NOTE: Repo currently exists and appears to have local changes!"
                        )
                        logging.info("-- Skipping fetch!")
                        skip_repo = True

                # Git Repos: STEP 2b ---------------------------------
                # If we're not skipping, grab it.
                if not skip_repo or force_it:
                    logging.info("## Fetching repo.")
                    # Using RunCmd for this one because the c.wait blocks incorrectly somehow.
                    cmd_string = "submodule update --init --recursive --progress"
                    if cache_path is not None:
                        cmd_string += " --reference " + cache_path
                    cmd_string += " " + required_repo
                    RunCmd('git', cmd_string, workingdir=my_workspace_path)

                MuLogging.log_progress("Done.\n")

            except RuntimeError as e:
                logging.error("FAILED!\n")
                logging.error("Failed to fetch required repository!\n")
                log_lines(logging.ERROR, str(e))

    # Now that we should have all of the required code,
    # we're ready to build the environment and fetch the
    # dependencies for this project.
    MuLogging.log_progress("## Fetching all external dependencies...")
    (build_env, shell_env) = minimum_env_init(my_workspace_path,
                                              my_project_scope)
    SelfDescribingEnvironment.UpdateDependencies(my_workspace_path,
                                                 my_project_scope)
    MuLogging.log_progress("Done.\n")
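A hedged sketch of driving setup_process (and, through it, minimum_env_init) from a bootstrap script; the workspace path, project scope, and required repo list are hypothetical placeholders:

# Hypothetical bootstrap driver for setup_process above. All values are placeholders.
if __name__ == "__main__":
    workspace = r"C:\src\mu_workspace"             # hypothetical workspace root
    project_scope = ("project_mu",)                # hypothetical scope tuple
    required_repos = ("MU_BASECORE", "Common/MU")  # hypothetical submodule paths
    setup_process(workspace, project_scope, required_repos,
                  force_it=False, cache_path=None)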