def getChannelLabels(self):
    """
     Output: dict mapping each channel family label to its list of channel labels,
             skipping any family whose label appears in self.skipProductList
    """
    labels = {}
    satDump = SatDumpClient(self.baseURL)
    channelFamilies = satDump.getChannelFamilies(self.systemid)
    for d in channelFamilies.values():
        if d["label"] in self.skipProductList:
            continue
        labels[d["label"]] = d["channel_labels"]
    return labels
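# Illustrative note (assumed example data, not from the original source): the
# mapping returned by getChannelLabels() keys a channel family label to the
# channel labels in that family, for example:
#
#   {
#       "rhel-server": ["rhel-x86_64-server-5", "rhel-i386-server-5"],
#       "rhn-tools": ["rhn-tools-rhel-x86_64-server-5"],
#   }
#
# Families listed in self.skipProductList are omitted from the result.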
def checkChannels(self, channelsToSync):
    """
     Input: channelsToSync - list of channels to sync
     Output: list containing bad channel names
    """
    satDump = SatDumpClient(self.baseURL)
    channelFamilies = satDump.getChannelFamilies(self.systemid)
    badChannel = []
    for channelLabel in channelsToSync:
        found = False
        for d in channelFamilies.values():
            if channelLabel in d["channel_labels"]:
                LOG.debug("Found %s under %s" % (channelLabel, d["label"]))
                found = True
                break
        if not found:
            LOG.debug("Unable to find %s, adding it to badChannel list" % (channelLabel))
            badChannel.append(channelLabel)
    return badChannel
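# Usage sketch (hypothetical caller code; the instance name and channel labels
# below are assumptions, not part of this module): validate requested channels
# before starting a sync and drop any that the satellite dump does not know about.
#
#   badChannels = syncer.checkChannels(["rhel-x86_64-server-5", "no-such-channel"])
#   if badChannels:
#       LOG.error("Skipping unknown channels: %s" % badChannels)
#       channelsToSync = [c for c in channelsToSync if c not in badChannels]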
def syncPackages(self, channelLabel, savePath, verbose=0):
    """
      channelLabel - channel to sync packages from
      savePath - path to save packages
      verbose - if true display more output
    """
    startTime = time.time()
    if channelLabel == "":
        LOG.critical("No channel label specified to sync, abort sync.")
        raise NoChannelLabelException()
    LOG.info("sync(%s, %s) invoked" % (channelLabel, verbose))
    satDump = SatDumpClient(self.baseURL, verbose=verbose)
    LOG.debug("*** calling getChannelPackages ***")
    packages = satDump.getChannelPackages(self.systemid, channelLabel)
    LOG.info("%s packages are available, getting list of short metadata now." % (len(packages)))
    pkgInfo = satDump.getShortPackageInfo(self.systemid, packages, filterLatest=not self.fetchAll)
    LOG.info("%s packages have been marked to be fetched" % (len(pkgInfo.values())))
    numThreads = int(self.parallel)
    LOG.info("Running in parallel fetch mode with %s threads" % (numThreads))
    pkgFetch = PackageFetch(self.systemid, self.baseURL, channelLabel, savePath)
    self.parallelFetchPkgs = ParallelFetch(pkgFetch, numThreads)
    self.parallelFetchPkgs.addItemList(pkgInfo.values())
    self.parallelFetchPkgs.start()
    report = self.parallelFetchPkgs.waitForFinish()
    LOG.debug("Attempting to fetch comps.xml info from RHN")
    self.fetchCompsXML(savePath, channelLabel)
    self.fetchUpdateinfo(savePath, channelLabel)
    endTime = time.time()
    LOG.info("Processed <%s> %s packages, %s errors, completed in %s seconds" \
        % (channelLabel, report.successes, report.errors, (endTime - startTime)))
    if self.removeOldPackages:
        LOG.info("Remove old packages from %s" % (savePath))
        self.runRemoveOldPackages(savePath)
    return report
def syncKickstarts(self, channelLabel, savePath, verbose=0):
    """
      channelLabel - channel to sync kickstarts from
      savePath - path to save kickstarts
      verbose - if true display more output
    """
    startTime = time.time()
    satDump = SatDumpClient(self.baseURL, verbose=verbose)
    ksLabels = satDump.getKickstartLabels(self.systemid, [channelLabel])
    LOG.info("Found %s kickstart labels for channel %s" % (len(ksLabels[channelLabel]), channelLabel))
    ksFiles = []
    for ksLbl in ksLabels[channelLabel]:
        LOG.info("Syncing kickstart label: %s" % (ksLbl))
        metadata = satDump.getKickstartTreeMetadata(self.systemid, [ksLbl])
        LOG.info("Retrieved metadata on %s files for kickstart label: %s" % (len(metadata[ksLbl]["files"]), ksLbl))
        ksSavePath = os.path.join(savePath, ksLbl)
        for ksFile in metadata[ksLbl]["files"]:
            info = {}
            info["relative-path"] = ksFile["relative-path"]
            info["size"] = ksFile["file-size"]
            info["md5sum"] = ksFile["md5sum"]
            info["ksLabel"] = ksLbl
            info["channelLabel"] = channelLabel
            info["savePath"] = ksSavePath
            ksFiles.append(info)
    ksFetch = KickstartFetch(self.systemid, self.baseURL)
    numThreads = int(self.parallel)
    self.parallelFetchKickstarts = ParallelFetch(ksFetch, numThreads)
    self.parallelFetchKickstarts.addItemList(ksFiles)
    self.parallelFetchKickstarts.start()
    report = self.parallelFetchKickstarts.waitForFinish()
    endTime = time.time()
    LOG.info("Processed %s %s %s kickstart files, %s errors, completed in %s seconds" \
        % (channelLabel, ksLabels[channelLabel], report.successes, report.errors, (endTime - startTime)))
    return report
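# End-to-end sketch (hypothetical; assumes an already-configured instance of
# this class, and the channel label and save paths below are example values,
# not part of the original code):
#
#   label = "rhel-x86_64-server-5"
#   pkgReport = syncer.syncPackages(label, "/var/satellite/%s" % label, verbose=1)
#   ksReport = syncer.syncKickstarts(label, "/var/satellite/%s/kickstarts" % label, verbose=1)
#   LOG.info("Packages: %s ok / %s errors; Kickstarts: %s ok / %s errors"
#            % (pkgReport.successes, pkgReport.errors, ksReport.successes, ksReport.errors))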