def fetchYumRepo(self, basepath="./", callback=None, verify_options=None):
    """Synchronize this repository: metadata, packages, and distribution trees.

    @param basepath: directory under which repo content is downloaded
    @param callback: optional progress callback handed to ParallelFetch
    @param verify_options: dict with "size"/"checksum" booleans controlling
        verification of already-downloaded files; falsy means no verification
    @return: (report, elapsed_seconds) tuple, or None if the sync was stopped
    """
    startTime = time.time()
    # NOTE(review): verify_options is handed to RepoFetch *before* the
    # default below is applied, so RepoFetch may receive None here --
    # confirm RepoFetch applies its own default in that case.
    self.yumFetch = RepoFetch(self.repo_label, repourl=self.repo_url,
                              cacert=self.sslcacert, clicert=self.sslclientcert,
                              clikey=self.sslclientkey, mirrorlist=self.mirrors,
                              download_dir=basepath, proxy_url=self.proxy_url,
                              proxy_port=self.proxy_port, proxy_user=self.proxy_user,
                              proxy_pass=self.proxy_pass, sslverify=self.sslverify,
                              max_speed=self.max_speed, verify_options=verify_options)
    self.fetchPkgs = ParallelFetch(self.yumFetch, self.numThreads, callback=callback)
    try:
        if not verify_options:
            verify_options = {"size": False, "checksum": False}
        self.yumFetch.setupRepo()
        # first fetch the metadata
        self.fetchPkgs.processCallback(ProgressReport.DownloadMetadata)
        self.yumFetch.getRepoData()
        if self.stopped:
            return None
        # self.skip maps a content type to 1 when it should be skipped.
        # dict.get(...) != 1 replaces the deprecated has_key() checks and is
        # equivalent: a missing key (get returns None) also enables the step.
        if self.skip.get('packages') != 1:
            # get rpms to fetch
            self.prepareRPMS()
            # get drpms to fetch
            self.prepareDRPMS()
        else:
            log("Skipping packages preparation from sync process")
        if self.skip.get('distribution') != 1:
            # get Trees to fetch
            self.prepareTrees()
        else:
            log("Skipping distribution preparation from sync process")
        # prepare for download
        self.fetchPkgs.addItemList(self.downloadinfo)
        self.fetchPkgs.start()
        report = self.fetchPkgs.waitForFinish()
        self.yumFetch.finalizeMetadata()
        endTime = time.time()
        if self.skip.get('packages') != 1:
            if self.purge_orphaned:
                # drop packages on disk that are no longer in the repo metadata
                self.fetchPkgs.processCallback(ProgressReport.PurgeOrphanedPackages)
                self.purgeOrphanPackages(self.yumFetch.getPackageList(),
                                         self.yumFetch.repo_dir)
            if self.remove_old:
                log("Removing old packages to limit to %s" % self.numOldPackages)
                self.fetchPkgs.processCallback(ProgressReport.RemoveOldPackages)
                gutils = GrinderUtils()
                gutils.runRemoveOldPackages(self.pkgsavepath, self.numOldPackages)
        self.yumFetch.deleteBaseCacheDir()
        return report, (endTime - startTime)
    finally:
        # always stop worker threads and release repo handles, even on error
        self.fetchPkgs.stop()
        self.yumFetch.closeRepo()
def test_prepareTrees_no_treeinfo(self):
    """A repo that publishes no treeinfo file must yield zero distro items."""
    label = "test_prepareTrees_no_treeinfo"
    url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/repo_resync_a/"
    target_dir = os.path.join(self.temp_dir, label)
    distro_info = DistroInfo(repo_url=url, repo_dir=target_dir,
                             distropath=self.temp_dir)
    fetcher = RepoFetch()
    items = distro_info.prepareTrees(fetcher)
    self.assertEquals(0, len(items))
def test_prepareTrees_bad_url(self):
    """An unreachable repo URL must yield zero distro items, not an error."""
    label = "test_prepareTrees_bad_url"
    url = "http://repos.fedorapeople.org/repos/pulp/pulp/BAD_URL_DOESNT_EXIST/"
    target_dir = os.path.join(self.temp_dir, label)
    distro_info = DistroInfo(repo_url=url, repo_dir=target_dir,
                             distropath=self.temp_dir)
    fetcher = RepoFetch()
    items = distro_info.prepareTrees(fetcher)
    self.assertEquals(0, len(items))
def test_prepareTrees(self):
    """The demo repo's treeinfo should resolve to exactly three tree files."""
    label = "test_prepareTrees"
    url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest/"
    target_dir = os.path.join(self.temp_dir, label)
    distro_info = DistroInfo(repo_url=url, repo_dir=target_dir,
                             distropath=self.temp_dir)
    fetcher = RepoFetch()
    items = distro_info.prepareTrees(fetcher)
    self.assertEquals(3, len(items['files']))
def parse_metadata(label, url, download_dir, cacert=None, clientcert=None):
    """Fetch a repo's metadata and return the number of packages it lists.

    @param label: repo label passed to RepoFetch
    @param url: repo base URL
    @param download_dir: directory where the metadata is downloaded
    @param cacert: optional CA certificate for SSL repos
    @param clientcert: optional client certificate for SSL repos
    @return: length of the package list reported by the repo metadata
    """
    yumFetch = None
    try:
        yumFetch = RepoFetch(label, repourl=url,
                             cacert=cacert, clicert=clientcert,
                             download_dir=download_dir)
        yumFetch.setupRepo()
        yumFetch.getRepoData()
        pkglist = yumFetch.getPackageList()
        return len(pkglist)
    finally:
        # Note, if we don't execute closeRepo() then we will leak memory.
        # running with closeRepo() and I am not seeing any memory leaked
        #
        # BUG FIX: if RepoFetch() itself raises, yumFetch is still None and
        # an unguarded closeRepo() call here would raise AttributeError,
        # masking the original exception.
        if yumFetch is not None:
            yumFetch.closeRepo()
        del yumFetch