Example #1
 def fetchYumRepo(self, basepath="./", callback=None, verify_options=None):
     startTime = time.time()
     self.yumFetch = RepoFetch(self.repo_label, repourl=self.repo_url, \
                         cacert=self.sslcacert, clicert=self.sslclientcert, \
                         clikey=self.sslclientkey, mirrorlist=self.mirrors, \
                         download_dir=basepath, proxy_url=self.proxy_url, \
                         proxy_port=self.proxy_port, proxy_user=self.proxy_user, \
                         proxy_pass=self.proxy_pass, sslverify=self.sslverify,
                         max_speed=self.max_speed,
                         verify_options=verify_options)
     self.fetchPkgs = ParallelFetch(self.yumFetch, self.numThreads, callback=callback)
     try:
         if not verify_options:
             verify_options = {"size":False, "checksum":False}
         self.yumFetch.setupRepo()
         # first fetch the metadata
         self.fetchPkgs.processCallback(ProgressReport.DownloadMetadata)
         self.yumFetch.getRepoData()
         if self.stopped:
             return None
         if not self.skip.has_key('packages') or self.skip['packages'] != 1:
             # get rpms to fetch
             self.prepareRPMS()
             # get drpms to fetch
             self.prepareDRPMS()
         else:
             log("Skipping packages preparation from sync process")
         if not self.skip.has_key('distribution') or self.skip['distribution'] != 1:
             # get Trees to fetch
             self.prepareTrees()
         else:
             log("Skipping distribution preparation from sync process")
         # prepare for download
         self.fetchPkgs.addItemList(self.downloadinfo)
         self.fetchPkgs.start()
         report = self.fetchPkgs.waitForFinish()
         self.yumFetch.finalizeMetadata()
         endTime = time.time()
         #log("Processed <%s> items in [%d] seconds" % (len(self.downloadinfo), \
         #      (endTime - startTime)))
         if not self.skip.has_key('packages') or self.skip['packages'] != 1:
             if self.purge_orphaned:
                 #log("Cleaning any orphaned packages..")
                 self.fetchPkgs.processCallback(ProgressReport.PurgeOrphanedPackages)
                 self.purgeOrphanPackages(self.yumFetch.getPackageList(), self.yumFetch.repo_dir)
             if self.remove_old:
                 log("Removing old packages to limit to %s" % self.numOldPackages)
                 self.fetchPkgs.processCallback(ProgressReport.RemoveOldPackages)
                 gutils = GrinderUtils()
                 gutils.runRemoveOldPackages(self.pkgsavepath, self.numOldPackages)
         self.yumFetch.deleteBaseCacheDir()
         #log("Processed <%s> in %s seconds" % (report, endTime - startTime))
         return report, (endTime - startTime)
     finally:
         self.fetchPkgs.stop()
         self.yumFetch.closeRepo()
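The example above returns a (report, elapsed_seconds) pair once the parallel download finishes. A minimal driver sketch follows, assuming a YumRepoGrinder-style constructor taking the repo label, URL and thread count; the import path and constructor arguments are assumptions not shown in the snippet, and only the return shape plus the report.successes / report.errors fields come from these examples:

    # Hypothetical usage sketch; import path and constructor arguments are assumed.
    from grinder.RepoFetch import YumRepoGrinder

    yum_grinder = YumRepoGrinder("fedora-updates", "http://example.com/repo/x86_64/", 5)
    report, elapsed = yum_grinder.fetchYumRepo(
        basepath="/var/lib/repos",
        verify_options={"size": True, "checksum": True})
    print("%s successes, %s errors in %.1f seconds"
          % (report.successes, report.errors, elapsed))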
Example #2
    def setup(self, basepath="./", callback=None, verify_options=None, num_retries=None, retry_delay=None):
        """
        Fetches yum metadata and determines what object should be downloaded.

        @param basepath: path to store repo data
        @type basepath: str

        @param callback: progress callback function
        @type callback: function which accepts a grinder.GrinderCallback.ProgressReport

        @param verify_options: controls verification checks on "size" and "checksum".
        @type verify_options: dict{"size":bool,"checksum":bool}

        @param num_retries: number of retries to perform if an error occurs
        @type num_retries: int

        @param retry_delay: delay in seconds between retries, delay = 'retry_attempt' * 'retry_delay'
        @type retry_delay: int
        """
        self.repo_dir = os.path.join(basepath, self.repo_label)
        LOG.info("%s, %s, Calling RepoFetch with: cacert=<%s>, clicert=<%s>, clikey=<%s>, proxy_url=<%s>, proxy_port=<%s>, proxy_user=<%s>, proxy_pass=<NOT_LOGGED>, sslverify=<%s>, max_speed=<%s>, verify_options=<%s>, filter=<%s>" %\
             (self.repo_label, self.repo_url, self.sslcacert, self.sslclientcert, self.sslclientkey, self.proxy_url, self.proxy_port, self.proxy_user, self.sslverify, self.max_speed, verify_options, self.filter))

        self.repoFetch = RepoFetch(cacert=self.sslcacert, clicert=self.sslclientcert, clikey=self.sslclientkey,\
        proxy_url=self.proxy_url, proxy_port=self.proxy_port,
        proxy_user=self.proxy_user, proxy_pass=self.proxy_pass,
        sslverify=self.sslverify,
        max_speed=self.max_speed,
        verify_options=verify_options, num_retries=num_retries)
        self.fetchPkgs = ParallelFetch(self.repoFetch, self.numThreads, callback=callback)
        self.fetchPkgs.processCallback(ProgressReport.DownloadMetadata)

        info = YumInfo(
            repo_label=self.repo_label, repo_url=self.repo_url, 
            mirrors = self.mirrors, repo_dir=self.repo_dir, 
            packages_location=self.pkgpath, newest=self.newest,
            remove_old=self.remove_old, numOldPackages=self.numOldPackages,
            cacert=self.sslcacert, clicert=self.sslclientcert, 
            clikey=self.sslclientkey, proxy_url=self.proxy_url, 
            proxy_port=self.proxy_port, proxy_user=self.proxy_user, 
            proxy_pass=self.proxy_pass, sslverify=self.sslverify, skip=self.skip,
            tmp_path=self.tmp_path, filter=self.filter,
            num_retries=num_retries, retry_delay=retry_delay)
        info.setUp()
        self.rpmlist = info.rpms
        self.drpmlist = info.drpms
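Note that setup() only builds the work lists (info.rpms and info.drpms); nothing is downloaded yet. The docstring above also fixes the retry back-off rule as delay = retry_attempt * retry_delay. A self-contained sketch of just that rule; the helper name is made up for illustration and is not part of grinder:

    # Restates the docstring's back-off rule; retry_backoff() is not a grinder API.
    def retry_backoff(retry_attempt, retry_delay):
        return retry_attempt * retry_delay

    # With retry_delay=5, three attempts wait 5, 10 and 15 seconds.
    print([retry_backoff(n, 5) for n in (1, 2, 3)])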
Example #3
 def fetch(self, basepath="./", callback=None):
     LOG.info("fetch basepath = %s" % (basepath))
     startTime = time.time()
     self.fileFetch = FileFetch(self.repo_label, self.repo_url, cacert=self.sslcacert, \
                                clicert=self.sslclientcert, clikey=self.sslclientkey, \
                                download_dir=basepath, proxy_url=self.proxy_url, \
                                proxy_port=self.proxy_port, proxy_user=self.proxy_user, \
                                proxy_pass=self.proxy_pass, sslverify=self.sslverify, max_speed=self.max_speed)
     self.parallel_fetch_files = ParallelFetch(self.fileFetch,
                                               self.numThreads,
                                               callback=callback)
     LOG.info("Determining downloadable Content bits...")
     self.parallel_fetch_files.processCallback(
         ProgressReport.DownloadMetadata)
     self.prepareFiles()
     # prepare for download
     self.parallel_fetch_files.addItemList(self.downloadinfo)
     self.parallel_fetch_files.start()
     report = self.parallel_fetch_files.waitForFinish()
     endTime = time.time()
     LOG.info("Processed <%s> items in [%d] seconds" % (len(self.downloadinfo), \
               (endTime - startTime)))
     return report
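As in Example #2, the callback argument here is the hook for progress reporting; the docstring there says it receives a grinder.GrinderCallback.ProgressReport. A sketch of a minimal callback that only logs the report, since its individual fields are not documented in these snippets; file_grinder stands in for an already-constructed instance of the class that defines fetch():

    # Hypothetical progress callback; the report object is logged as-is.
    def progress_callback(progress_report):
        print("sync progress: %s" % progress_report)

    # 'file_grinder' is an assumed, already-configured instance.
    report = file_grinder.fetch(basepath="/var/lib/files", callback=progress_callback)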
Example #4
    def syncPackages(self, channelLabel, savePath, verbose=0):
        """
        channelLabel - channel to sync packages from
        savePath - path to save packages
        verbose - if true display more output
        """
        startTime = time.time()
        if channelLabel == "":
            LOG.critical("No channel label specified to sync, abort sync.")
            raise NoChannelLabelException()
        LOG.info("sync(%s, %s) invoked" % (channelLabel, verbose))
        satDump = SatDumpClient(self.baseURL, verbose=verbose)
        LOG.debug("*** calling product_names ***")
        packages = satDump.getChannelPackages(self.systemid, channelLabel)
        LOG.info("%s packages are available, getting list of short metadata now." % (len(packages)))
        pkgInfo = satDump.getShortPackageInfo(self.systemid, packages, filterLatest = not self.fetchAll)
        LOG.info("%s packages have been marked to be fetched" % (len(pkgInfo.values())))

        numThreads = int(self.parallel)
        LOG.info("Running in parallel fetch mode with %s threads" % (numThreads))
        pkgFetch = PackageFetch(self.systemid, self.baseURL, channelLabel, savePath)
        self.parallelFetchPkgs = ParallelFetch(pkgFetch, numThreads)
        self.parallelFetchPkgs.addItemList(pkgInfo.values())
        self.parallelFetchPkgs.start()
        report = self.parallelFetchPkgs.waitForFinish()
        LOG.debug("Attempting to fetch comps.xml info from RHN")
        self.fetchCompsXML(savePath, channelLabel)
        self.fetchUpdateinfo(savePath, channelLabel)
        endTime = time.time()
        LOG.info("Processed <%s> %s packages, %s errors, completed in %s seconds" \
                % (channelLabel, report.successes, report.errors, (endTime-startTime)))
        if self.removeOldPackages:
            LOG.info("Remove old packages from %s" % (savePath))

            self.runRemoveOldPackages(savePath)
        return report
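syncPackages() raises NoChannelLabelException when the channel label is empty, so a caller looping over several channels needs to handle it. A sketch of such a loop; rhn_sync stands in for an already-configured instance of the class that defines syncPackages(), whose construction is not shown above:

    # Hypothetical loop; 'rhn_sync' is an assumed, already-configured instance,
    # and NoChannelLabelException is imported from wherever the class defines it.
    for channel in ("rhel-x86_64-server-6", ""):
        try:
            report = rhn_sync.syncPackages(channel, "/var/satellite/%s" % channel, verbose=1)
            print("%s: %s fetched, %s errors" % (channel, report.successes, report.errors))
        except NoChannelLabelException:
            print("skipped sync: empty channel label")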
Example #5
 def syncKickstarts(self, channelLabel, savePath, verbose=0):
     """
     channelLabel - channel to sync kickstarts from
     savePath - path to save kickstarts
     verbose - if true display more output
     """
     startTime = time.time()
     satDump = SatDumpClient(self.baseURL, verbose=verbose)
     ksLabels = satDump.getKickstartLabels(self.systemid, [channelLabel])
     LOG.info("Found %s kickstart labels for channel %s" % (len(ksLabels[channelLabel]), channelLabel))
     ksFiles = []
     for ksLbl in ksLabels[channelLabel]:
         LOG.info("Syncing kickstart label: %s" % (ksLbl))
         metadata = satDump.getKickstartTreeMetadata(self.systemid, [ksLbl])
         LOG.info("Retrieved metadata on %s files for kickstart label: %s" % (len(metadata[ksLbl]["files"]), ksLbl))
         ksSavePath = os.path.join(savePath, ksLbl)
         for ksFile in metadata[ksLbl]["files"]:
             info = {}
             info["relative-path"] = ksFile["relative-path"]
             info["size"] = ksFile["file-size"]
             info["md5sum"] = ksFile["md5sum"]
             info["ksLabel"] = ksLbl
             info["channelLabel"] = channelLabel
             info["savePath"] = ksSavePath
             ksFiles.append(info)
     ksFetch = KickstartFetch(self.systemid, self.baseURL)
     numThreads = int(self.parallel)
     self.parallelFetchKickstarts = ParallelFetch(ksFetch, numThreads)
     self.parallelFetchKickstarts.addItemList(ksFiles)
     self.parallelFetchKickstarts.start()
     report = self.parallelFetchKickstarts.waitForFinish()
     endTime = time.time()
     LOG.info("Processed %s %s %s kickstart files, %s errors, completed in %s seconds" \
             % (channelLabel, ksLabels[channelLabel], report.successes, 
                 report.errors, (endTime-startTime)))
     return report
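Each work item queued to ParallelFetch in this example is a plain dict built in the inner loop. The keys below are exactly the ones populated above; the values are illustrative placeholders only:

    # Shape of one kickstart work item from the loop above (values are made up).
    ks_item = {
        "relative-path": "images/pxeboot/vmlinuz",
        "size": 4238848,
        "md5sum": "0123456789abcdef0123456789abcdef",
        "ksLabel": "ks-rhel-x86_64-server-6",
        "channelLabel": "rhel-x86_64-server-6",
        "savePath": "/var/satellite/rhel-x86_64-server-6/ks-rhel-x86_64-server-6",
    }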