def clone_repo(abs_file_system_path, DepObj):
    logger = logging.getLogger("git")
    logger.log(edk2_logging.get_progress_level(), "Cloning repo: {0}".format(DepObj["Url"]))
    dest = abs_file_system_path
    if not os.path.isdir(dest):
        os.makedirs(dest, exist_ok=True)

    shallow = False
    if "Commit" in DepObj:
        shallow = False
    if "Full" in DepObj and DepObj["Full"] is True:
        shallow = False

    reference = None
    if "ReferencePath" in DepObj and os.path.exists(DepObj["ReferencePath"]):
        reference = os.path.abspath(DepObj["ReferencePath"])

    result = Repo.clone_from(DepObj["Url"], dest, shallow=shallow, reference=reference)

    if result is None:
        if "ReferencePath" in DepObj:
            # attempt a retry without the reference
            logger.warning("Reattempting to clone without a reference. {0}".format(DepObj["Url"]))
            result = Repo.clone_from(DepObj["Url"], dest, shallow=shallow)
        if result is None:
            return None

    return dest
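# Illustrative usage sketch (not part of the original module; the URL, commit,
# and folder names below are placeholders). It shows the dependency-descriptor
# keys clone_repo() inspects: "Url" (required), optionally "Commit"/"Full",
# and an optional "ReferencePath" pointing at a local reference repo.
def _example_clone_repo(workspace_root):
    dep = {
        "Url": "https://github.com/example/example.git",              # placeholder URL
        "Commit": "0123456789abcdef0123456789abcdef01234567",         # placeholder SHA
        # "ReferencePath": "/path/to/omnicache",                      # optional local reference repo
    }
    dest = os.path.join(workspace_root, "example_ext_dep")
    # returns dest on success, None if both clone attempts fail
    return clone_repo(dest, dep)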
def verify(self):
    result = True

    if not os.path.isdir(self._local_repo_root_path):
        self.logger.error("no dir for Git Dependency")
        result = False

    if result and len(os.listdir(self._local_repo_root_path)) == 0:
        self.logger.error("no files in Git Dependency")
        result = False

    if result:
        # valid repo folder
        r = Repo(self._local_repo_root_path)
        if not r.initalized:
            self.logger.error("Git Dependency: Not Initialized")
            result = False
        elif r.dirty:
            self.logger.error("Git Dependency: dirty")
            result = False

        if r.head.commit != self.version:
            self.logger.error(
                f"Git Dependency: head is {r.head.commit} and version is {self.version}")
            result = False

    self.logger.debug("Verify '%s' returning '%s'." % (self.name, result))
    return result
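# Minimal standalone sketch (an illustration only, not part of the class above):
# the same checks verify() performs, expressed against a plain path and an
# expected commit using the same Repo wrapper. Returns True only when the folder
# exists, is a non-empty initialized repo, is clean, and HEAD matches the pin.
def _example_verify(path, expected_commit):
    if not os.path.isdir(path) or len(os.listdir(path)) == 0:
        return False
    r = Repo(path)
    return bool(r.initalized) and not r.dirty and r.head.commit == expected_commit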
def resolve(file_system_path, dependency, force=False, ignore=False, update_ok=False):
    logger = logging.getLogger("git")
    logger.info("Checking for dependency {0}".format(dependency["Path"]))
    git_path = os.path.abspath(file_system_path)

    # check if we have a path in our dependency
    if "Path" in dependency and not git_path.endswith(os.path.relpath(dependency["Path"])):
        # if we don't already have the dependency's path at the end of the path we've been given
        git_path = os.path.join(git_path, dependency["Path"])

    ##
    # NOTE - this process is defined in the Readme.md including flow chart for this behavior
    ##
    if not os.path.isdir(git_path):
        clone_repo(git_path, dependency)
        r = Repo(git_path)
        checkout(git_path, dependency, r, True, False)
        return r

    folder_empty = len(os.listdir(git_path)) == 0
    if folder_empty:  # if the folder is empty, we can clone into it
        clone_repo(git_path, dependency)
        r = Repo(git_path)
        checkout(git_path, dependency, r, True, False)
        return r

    repo = Repo(git_path)
    if not repo.initalized:  # if there isn't a .git folder in there
        if force:
            clear_folder(git_path)
            logger.warning(
                "Folder {0} is not a git repo and is being overwritten!".format(git_path))
            clone_repo(git_path, dependency)
            checkout(git_path, dependency, repo, True, False)
            return repo
        else:
            if ignore:
                logger.warning(
                    "Folder {0} is not a git repo but Force parameter not used. "
                    "Ignore State Allowed.".format(git_path))
                return repo
            else:
                logger.critical(
                    "Folder {0} is not a git repo and it is not empty.".format(git_path))
                raise Exception(
                    "Folder {0} is not a git repo and it is not empty".format(git_path))

    if repo.dirty:
        if force:
            clear_folder(git_path)
            logger.warning(
                "Folder {0} is a git repo but is dirty and is being overwritten as requested!".format(git_path))
            clone_repo(git_path, dependency)
            checkout(git_path, dependency, repo, True, False)
            return repo
        else:
            if ignore:
                logger.warning(
                    "Folder {0} is a git repo but is dirty and Force parameter not used. "
                    "Ignore State Allowed.".format(git_path))
                return repo
            else:
                logger.critical(
                    "Folder {0} is a git repo and is dirty.".format(git_path))
                raise Exception(
                    "Folder {0} is a git repo and is dirty.".format(git_path))

    if repo.remotes.origin.url != dependency["Url"]:
        if force:
            clear_folder(git_path)
            logger.warning(
                "Folder {0} is a git repo but it is at a different repo and is "
                "being overwritten as requested!".format(git_path))
            clone_repo(git_path, dependency)
            checkout(git_path, dependency, repo, True, False)
        else:
            if ignore:
                logger.warning(
                    "Folder {0} is a git repo pointed at a different remote. "
                    "Can't checkout or sync state".format(git_path))
                return
            else:
                logger.critical("The URL of the git Repo {2} in the folder {0} does not match {1}".format(
                    git_path, dependency["Url"], repo.remotes.origin.url))
                raise Exception("The URL of the git Repo {2} in the folder {0} does not match {1}".format(
                    git_path, dependency["Url"], repo.remotes.origin.url))

    checkout(git_path, dependency, repo, update_ok, ignore, force)
    return repo
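# Usage sketch (illustrative; the folder name and URL are placeholders):
# resolve() is the entry point that decides whether to clone into, reuse,
# overwrite (force=True), or reject the target folder, then delegates the final
# sync to checkout().
def _example_resolve(workspace_root):
    dep = {
        "Path": "example_dep",                              # placeholder folder name
        "Url": "https://github.com/example/example.git",    # placeholder URL
        "Branch": "main",                                    # or pin with "Commit"
    }
    return resolve(workspace_root, dep, update_ok=True)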
def get_details(abs_file_system_path):
    repo = Repo(abs_file_system_path)
    url = repo.remotes.origin.url
    active_branch = repo.active_branch
    head = repo.head.commit
    return {"Url": url, "Branch": active_branch, "Commit": head}
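# Illustrative sketch (not in the original module): compare the current state of
# a checkout, as reported by get_details(), against a dependency descriptor of
# the shape used by resolve()/checkout() above.
def _example_details_match(abs_path, dep):
    details = get_details(abs_path)
    if "Commit" in dep:
        return details["Commit"] == dep["Commit"]
    if "Branch" in dep:
        # str() guards against the branch being reported as a non-string object
        return str(details["Branch"]) == dep["Branch"]
    return False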
def checkout(abs_file_system_path, dep, repo, update_ok=False, ignore_dep_state_mismatch=False, force=False):
    logger = logging.getLogger("git")
    if repo is None:
        repo = Repo(abs_file_system_path)

    if "Commit" in dep:
        commit = dep["Commit"]
        if update_ok or force:
            repo.fetch()
            result = repo.checkout(commit=commit)
            if result is False:
                repo.fetch()
                repo.checkout(commit=commit)
            repo.submodule("update", "--init", "--recursive")
        else:
            if repo.head.commit == commit:
                logger.debug("Dependency {0} state ok without update".format(dep["Path"]))
                return
            elif ignore_dep_state_mismatch:
                logger.warning(
                    "Dependency {0} is not in sync with requested commit. Ignore state allowed"
                    .format(dep["Path"]))
                return
            else:
                logger.critical(
                    "Dependency {0} is not in sync with requested commit. Fail."
                    .format(dep["Path"]))
                raise Exception(
                    "Dependency {0} is not in sync with requested commit. Fail."
                    .format(dep["Path"]))

    elif "Branch" in dep:
        branch = dep["Branch"]
        if update_ok or force:
            repo.fetch()
            result = repo.checkout(branch=branch)
            if result is False:
                # we failed to do this
                # try to fetch it and try to checkout again
                logger.info("We failed to checkout this branch, we'll try to fetch")
                repo.fetch(branch=branch)
                repo.checkout(branch=branch)
            repo.submodule("update", "--init", "--recursive")
        else:
            if repo.active_branch == dep["Branch"]:
                logger.debug("Dependency {0} state ok without update".format(dep["Path"]))
                return
            elif ignore_dep_state_mismatch:
                logger.warning(
                    "Dependency {0} is not in sync with requested branch. Ignore state allowed"
                    .format(dep["Path"]))
                return
            else:
                error = "Dependency {0} is not in sync with requested branch. Expected: {1}. Got {2} Fail.".format(
                    dep["Path"], dep["Branch"], repo.active_branch)
                logger.critical(error)
                raise Exception(error)

    else:
        raise Exception("Branch or Commit must be specified for {0}".format(dep["Path"]))
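# Usage sketch (illustrative; values are placeholders): with update_ok=True,
# checkout() fetches and moves the working tree to the requested branch or
# commit; with update_ok=False it only validates the current state and raises
# on a mismatch unless ignore_dep_state_mismatch is set. Passing repo=None lets
# checkout() construct the Repo wrapper itself.
def _example_checkout(workspace_root):
    dep = {"Path": "example_dep", "Url": "https://github.com/example/example.git", "Branch": "main"}
    path = os.path.join(workspace_root, dep["Path"])
    checkout(path, dep, None, update_ok=True)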
def main():
    # setup main console as logger
    logger = logging.getLogger('')
    logger.setLevel(logging.NOTSET)
    console = edk2_logging.setup_console_logging(False)
    logger.addHandler(console)

    ErrorCode = 0
    auto_fetch = False
    input_config_remotes = None

    # arg parse
    args = get_cli_options()

    if args.debug:
        console.setLevel(logging.DEBUG)

    logging.info("Log Started: " + datetime.datetime.strftime(
        datetime.datetime.now(), "%A, %B %d, %Y %I:%M%p"))

    args.cache_dir = CommonFilePathHandler(args.cache_dir)
    logging.debug("OMNICACHE dir: {0}".format(args.cache_dir))

    # input config file for adding new entries
    if args.input_config_file is not None:
        args.input_config_file = CommonFilePathHandler(args.input_config_file)
        if not os.path.isfile(args.input_config_file):
            logging.critical(
                "Invalid -c argument given. File ({0}) isn't valid".format(args.input_config_file))
            return -4

    logging.debug("Args: " + str(args))

    omnicache_config = None  # config object
    omnicache_config_file = os.path.join(args.cache_dir, OMNICACHE_FILENAME)

    if args.new:
        if os.path.isdir(args.cache_dir):
            logging.critical("--new argument given but OMNICACHE path already exists!")
            return -1
        InitOmnicache(args.cache_dir)
        auto_fetch = True

    if args.init:
        if os.path.isdir(args.cache_dir):
            if os.path.isfile(omnicache_config_file):
                logging.debug("OMNICACHE already exists. No need to initialize")
        else:
            InitOmnicache(args.cache_dir)
        auto_fetch = True

    # Check to see if the cache directory exists
    if not os.path.isdir(args.cache_dir):
        logging.critical("OMNICACHE path invalid.")
        return -2

    # load config
    omnicache_config = OmniCacheConfig(omnicache_config_file)

    os.chdir(args.cache_dir)

    if (len(args.add) > 0):
        auto_fetch = True
        for inputdata in args.add:
            if len(inputdata) == 2:
                AddEntry(omnicache_config, inputdata[0], inputdata[1])
            elif len(inputdata) == 3:
                AddEntry(omnicache_config, inputdata[0], inputdata[1], bool(inputdata[2]))
            else:
                logging.critical(
                    "Invalid Add Entry. Should be <name> <url> <Sync Tags optional default=False>")
                return -3

    if (args.input_config_file is not None):
        (count, input_config_remotes) = AddEntriesFromConfig(omnicache_config, args.input_config_file)
        if (count > 0):
            auto_fetch = True

    if len(args.remove) > 0:
        for inputdata in args.remove:
            RemoveEntry(omnicache_config, inputdata)

    # if we need to scan
    if args.scan is not None:
        logging.critical("OMNICACHE is scanning the folder %s." % args.scan)
        if not os.path.isdir(args.scan):
            logging.error("Invalid scan directory")
            return -4
        reposFound = dict()
        # iterate through top level directories
        dirs = os.listdir(args.scan)
        while len(dirs) > 0:
            item = dirs.pop()
            itemDir = os.path.join(args.scan, item)
            if os.path.isfile(itemDir):
                continue
            logging.info("Scanning %s for a git repo" % item)
            gitDir = os.path.join(itemDir, ".git")
            # Check if it's a directory or a file (submodules usually have a file instead of a folder)
            if os.path.isdir(gitDir) or os.path.isfile(gitDir):
                repo = Repo(itemDir)
                if repo.url:
                    if repo.url not in reposFound:
                        reposFound[repo.url] = item
                    else:
                        logging.warning(
                            "Skipping previously found repo at %s with url %s" % (item, repo.url))
                else:  # if repo.url is none
                    logging.error("Url not found for git repo at: %s" % itemDir)
                # check for submodules
                if repo.submodules:
                    for submodule in repo.submodules:
                        dirs.append(os.path.join(item, submodule))
            else:
                logging.error("Git repo not found at %s" % itemDir)
        # go through all the URLs found
        for url in reposFound:
            omnicache_config.Add(reposFound[url], url)

    omnicache_config.Save()

    if (args.fetch or (auto_fetch and not args.no_fetch)):
        logging.critical("Updating OMNICACHE")
        # as an optimization, if input config file provided, only fetch remotes specified in input config
        # otherwise, fetch all remotes in the OmniCache
        if (input_config_remotes is not None):
            remotes = (x["name"] for x in input_config_remotes)
        else:
            remotes = omnicache_config.remotes.keys()
        for remote in remotes:
            ret = FetchEntry(omnicache_config.remotes[remote]["name"],
                             ("tag" in omnicache_config.remotes[remote]))
            if (ret != 0) and (ErrorCode == 0):
                ErrorCode = ret

    if args.list:
        ret = ConsistencyCheckCacheConfig(omnicache_config)
        if (ret != 0) and (ErrorCode == 0):
            ErrorCode = ret
        print("List OMNICACHE content\n")
        if len(omnicache_config.remotes) == 0:
            logging.warning("No Remotes to show")
        for remote in omnicache_config.remotes.values():
            rstring = "Name: {0}\n  Url: {1}\n  Sync Tags: {2}".format(
                remote["name"], remote["url"], ("tag" in remote))
            print(" " + rstring + "\n\n")

    print("To use your OMNICACHE set the env variable:")
    print("set OMNICACHE_PATH=" + args.cache_dir)

    return ErrorCode
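# Illustrative sketch (not part of the module; folder name and URL are
# placeholders): a consumer can point git dependencies at the omnicache by
# reading the OMNICACHE_PATH variable that main() prints above and passing it
# through as the "ReferencePath" honored by clone_repo().
def _example_use_omnicache_as_reference(workspace_root):
    dep = {
        "Path": "example_dep",
        "Url": "https://github.com/example/example.git",
        "Branch": "main",
    }
    cache = os.environ.get("OMNICACHE_PATH")
    if cache is not None and os.path.isdir(cache):
        dep["ReferencePath"] = cache
    return resolve(workspace_root, dep, update_ok=True)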