Example #1
0
    def fetch(self, fetcher, progress):
        """Download repository metadata and attach loaders to this channel.

        Fetches repodata/repomd.xml, parses it for the locations and
        checksums of the primary and filelists metadata, downloads both,
        and registers an RPMMetaDataLoader for them.

        Returns True on success (or when the cached digest is current),
        False when only cached information is available, and raises
        Error on unrecoverable failures.
        """
        fetcher.reset()
        repomd = posixpath.join(self._baseurl, "repodata/repomd.xml")
        item = fetcher.enqueue(repomd)
        fetcher.run(progress=progress)

        if item.getStatus() is FAILED:
            # Account for all remaining steps we will now skip.
            progress.add(self.getFetchSteps() - 1)
            if fetcher.getCaching() is NEVER:
                lines = [
                    _("Failed acquiring release file for '%s':") % self,
                    u"%s: %s" % (item.getURL(), item.getFailedReason())
                ]
                raise Error, "\n".join(lines)
            return False

        digest = getFileDigest(item.getTargetPath())
        if digest == self._digest:
            # Metadata unchanged since the last fetch; keep current loaders.
            progress.add(1)
            return True
        self.removeLoaders()

        # Parse repomd.xml and collect, per data type (primary, filelists,
        # ...), the download URL plus compressed/uncompressed checksums.
        info = {}
        root = ElementTree.parse(item.getTargetPath()).getroot()
        for node in root.getchildren():
            if node.tag != DATA:
                continue
            datatype = node.get("type")  # renamed from 'type' (builtin shadow)
            info[datatype] = {}
            for subnode in node.getchildren():
                if subnode.tag == LOCATION:
                    info[datatype]["url"] = \
                        posixpath.join(self._baseurl, subnode.get("href"))
                if subnode.tag == CHECKSUM:
                    info[datatype][subnode.get("type")] = subnode.text
                if subnode.tag == OPENCHECKSUM:
                    info[datatype]["uncomp_"+subnode.get("type")] = \
                        subnode.text

        if "primary" not in info:
            raise Error, _("Primary information not found in repository "
                           "metadata for '%s'") % self

        fetcher.reset()
        item = fetcher.enqueue(info["primary"]["url"],
                               md5=info["primary"].get("md5"),
                               uncomp_md5=info["primary"].get("uncomp_md5"),
                               sha=info["primary"].get("sha"),
                               uncomp_sha=info["primary"].get("uncomp_sha"),
                               uncomp=True)
        flitem = fetcher.enqueue(
            info["filelists"]["url"],
            md5=info["filelists"].get("md5"),
            uncomp_md5=info["filelists"].get("uncomp_md5"),
            sha=info["filelists"].get("sha"),
            uncomp_sha=info["filelists"].get("uncomp_sha"),
            uncomp=True)
        fetcher.run(progress=progress)

        if item.getStatus() == SUCCEEDED and flitem.getStatus() == SUCCEEDED:
            localpath = item.getTargetPath()
            filelistspath = flitem.getTargetPath()
            loader = RPMMetaDataLoader(localpath, filelistspath, self._baseurl)
            loader.setChannel(self)
            self._loaders.append(loader)
        elif (item.getStatus() == SUCCEEDED and flitem.getStatus() == FAILED
              and fetcher.getCaching() is ALWAYS):
            iface.warning(
                _("You must fetch channel information to "
                  "acquire needed filelists."))
            return False
        elif fetcher.getCaching() is NEVER:
            # BUGFIX: report the item that actually failed; previously the
            # primary item was reported even when only filelists failed.
            if item.getStatus() == FAILED:
                faileditem = item
            else:
                faileditem = flitem
            lines = [
                _("Failed acquiring information for '%s':") % self,
                u"%s: %s" % (faileditem.getURL(),
                             faileditem.getFailedReason())
            ]
            raise Error, "\n".join(lines)
        else:
            return False

        # Only commit the new digest after everything loaded successfully.
        self._digest = digest

        return True
Example #2
0
    def fetch(self, fetcher, progress):
        """Download repository metadata and attach loaders to this channel.

        Fetches repodata/repomd.xml, extracts the URL and checksums of
        the primary and filelists metadata, downloads both, and
        registers an RPMMetaDataLoader for them.

        Returns True on success (or when the cached digest is current),
        False when only cached information is available, and raises
        Error on unrecoverable failures.
        """
        fetcher.reset()
        repomd = posixpath.join(self._baseurl, "repodata/repomd.xml")
        item = fetcher.enqueue(repomd)
        fetcher.run(progress=progress)

        if item.getStatus() is FAILED:
            # Account for all remaining steps we will now skip.
            progress.add(self.getFetchSteps()-1)
            if fetcher.getCaching() is NEVER:
                lines = [_("Failed acquiring release file for '%s':") % self,
                         u"%s: %s" % (item.getURL(), item.getFailedReason())]
                raise Error, "\n".join(lines)
            return False

        digest = getFileDigest(item.getTargetPath())
        if digest == self._digest:
            # Metadata unchanged since the last fetch; keep current loaders.
            progress.add(1)
            return True
        self.removeLoaders()

        # Parse repomd.xml and collect, per data type, the download URL
        # plus compressed/uncompressed checksums.
        info = {}
        root = ElementTree.parse(item.getTargetPath()).getroot()
        for node in root.getchildren():
            if node.tag != DATA:
                continue
            datatype = node.get("type")  # renamed from 'type' (builtin shadow)
            info[datatype] = {}
            for subnode in node.getchildren():
                if subnode.tag == LOCATION:
                    info[datatype]["url"] = \
                        posixpath.join(self._baseurl, subnode.get("href"))
                if subnode.tag == CHECKSUM:
                    info[datatype][subnode.get("type")] = subnode.text
                if subnode.tag == OPENCHECKSUM:
                    info[datatype]["uncomp_"+subnode.get("type")] = \
                        subnode.text

        if "primary" not in info:
            raise Error, _("Primary information not found in repository "
                           "metadata for '%s'") % self

        fetcher.reset()
        item = fetcher.enqueue(info["primary"]["url"],
                               md5=info["primary"].get("md5"),
                               uncomp_md5=info["primary"].get("uncomp_md5"),
                               sha=info["primary"].get("sha"),
                               uncomp_sha=info["primary"].get("uncomp_sha"),
                               uncomp=True)
        flitem = fetcher.enqueue(info["filelists"]["url"],
                                 md5=info["filelists"].get("md5"),
                                 uncomp_md5=info["filelists"].get("uncomp_md5"),
                                 sha=info["filelists"].get("sha"),
                                 uncomp_sha=info["filelists"].get("uncomp_sha"),
                                 uncomp=True)
        fetcher.run(progress=progress)

        if item.getStatus() == SUCCEEDED and flitem.getStatus() == SUCCEEDED:
            localpath = item.getTargetPath()
            filelistspath = flitem.getTargetPath()
            loader = RPMMetaDataLoader(localpath, filelistspath,
                                       self._baseurl)
            loader.setChannel(self)
            self._loaders.append(loader)
        elif (item.getStatus() == SUCCEEDED and
              flitem.getStatus() == FAILED and
              fetcher.getCaching() is ALWAYS):
            iface.warning(_("You must fetch channel information to "
                            "acquire needed filelists."))
            return False
        elif fetcher.getCaching() is NEVER:
            # BUGFIX: report the item that actually failed; previously the
            # primary item was reported even when only filelists failed.
            if item.getStatus() == FAILED:
                faileditem = item
            else:
                faileditem = flitem
            lines = [_("Failed acquiring information for '%s':") % self,
                     u"%s: %s" % (faileditem.getURL(),
                                  faileditem.getFailedReason())]
            raise Error, "\n".join(lines)
        else:
            return False

        # Only commit the new digest after everything loaded successfully.
        self._digest = digest

        return True
Example #3
0
    def fetch(self, fetcher, progress):
        """Download repository metadata and attach loaders to this channel.

        Optionally loads a mirror list first, verifies repomd.xml
        against a GPG fingerprint when one is configured, downloads the
        primary/filelists (plain or lzma variants) and optional
        updateinfo metadata, registers an RPMMetaDataLoader, and removes
        stale metadata files left over from a previous repomd.xml.

        Returns True on success (or when the cached digest is current),
        False when only cached information is available, and raises
        Error on unrecoverable failures.
        """
        fetcher.reset()

        if self._mirrorlist:
            mirrorlist = self._mirrorlist
            item = fetcher.enqueue(mirrorlist)
            fetcher.run(progress=progress)

            if item.getStatus() is FAILED:
                progress.add(self.getFetchSteps()-1)
                if fetcher.getCaching() is NEVER:
                    # Mirror list is an optimization only: warn, don't fail.
                    iface.warning(_("Could not load mirror list. Continuing with base URL only."))
            else:
                self.loadMirrors(item.getTargetPath())

            fetcher.reset()
        else:
            progress.add(1)

        repomd = posixpath.join(self._baseurl, "repodata/repomd.xml")
        reposig = posixpath.join(self._baseurl, "repodata/repomd.xml.asc")

        # Remember the previous metadata so stale files can be removed
        # after a successful update; best-effort only.
        oldinfo = {}
        path = self.getLocalPath(fetcher, repomd)
        if os.path.exists(path):
            try:
                oldinfo = self.loadMetadata(path)
            except Error:
                pass

        item = fetcher.enqueue(repomd)
        if self._fingerprint:
            gpgitem = fetcher.enqueue(reposig)
        fetcher.run(progress=progress)

        if item.getStatus() is FAILED:
            # Account for all remaining steps we will now skip.
            progress.add(self.getFetchSteps()-1)
            if fetcher.getCaching() is NEVER:
                lines = [_("Failed acquiring release file for '%s':") % self,
                         u"%s: %s" % (item.getURL(), item.getFailedReason())]
                raise Error, "\n".join(lines)
            return False

        if self._fingerprint:
            if gpgitem.getStatus() is FAILED:
                raise Error, \
                      _("Download of repomd.xml.asc failed for secure "
                        "channel '%s': %s") % (self, gpgitem.getFailedReason())

            # NOTE(review): paths are interpolated into a shell command;
            # they come from the local fetcher cache, but quoting them
            # would be safer — confirm path characters are constrained.
            status, output = commands.getstatusoutput(
                "gpg --batch --no-secmem-warning --status-fd 1 --verify "
                "%s %s" % (gpgitem.getTargetPath(), item.getTargetPath()))

            # Scan the machine-readable gpg status lines for the verdict.
            badsig = False
            goodsig = False
            validsig = None
            for line in output.splitlines():
                if line.startswith("[GNUPG:]"):
                    tokens = line[8:].split()
                    first = tokens[0]
                    if first == "VALIDSIG":
                        validsig = tokens[1]
                    elif first == "GOODSIG":
                        goodsig = True
                    elif first == "BADSIG":
                        badsig = True
            if badsig:
                raise Error, _("Channel '%s' has bad signature") % self
            if not goodsig or validsig != self._fingerprint:
                raise Error, _("Channel '%s' signed with unknown key") % self

        digest = getFileDigest(item.getTargetPath())
        if digest == self._digest:
            # Metadata unchanged since the last fetch; keep current loaders.
            progress.add(1)
            return True
        self.removeLoaders()

        info = self.loadMetadata(item.getTargetPath())

        if "primary" not in info and "primary_lzma" not in info:
            raise Error, _("Primary information not found in repository "
                           "metadata for '%s'") % self

        # Prefer the lzma-compressed variants when offered.
        if "primary_lzma" in info:
            primary = info["primary_lzma"]
        else:
            primary = info["primary"]
        if "filelists_lzma" in info:
            filelists = info["filelists_lzma"]
        else:
            filelists = info["filelists"]

        fetcher.reset()
        item = fetcher.enqueue(primary["url"],
                               md5=primary.get("md5"),
                               uncomp_md5=primary.get("uncomp_md5"),
                               sha=primary.get("sha"),
                               uncomp_sha=primary.get("uncomp_sha"),
                               sha256=primary.get("sha256"),
                               uncomp_sha256=primary.get("uncomp_sha256"),
                               uncomp=True)
        flitem = fetcher.enqueue(filelists["url"],
                                 md5=filelists.get("md5"),
                                 uncomp_md5=filelists.get("uncomp_md5"),
                                 sha=filelists.get("sha"),
                                 uncomp_sha=filelists.get("uncomp_sha"),
                                 sha256=filelists.get("sha256"),
                                 uncomp_sha256=filelists.get("uncomp_sha256"),
                                 uncomp=True)
        if "updateinfo" in info:
            uiitem = fetcher.enqueue(info["updateinfo"]["url"],
                                     md5=info["updateinfo"].get("md5"),
                                     uncomp_md5=info["updateinfo"].get("uncomp_md5"),
                                     sha=info["updateinfo"].get("sha"),
                                     uncomp_sha=info["updateinfo"].get("uncomp_sha"),
                                     uncomp=True)
        fetcher.run(progress=progress)

        if item.getStatus() == SUCCEEDED and flitem.getStatus() == SUCCEEDED:
            localpath = item.getTargetPath()
            filelistspath = flitem.getTargetPath()
            loader = RPMMetaDataLoader(localpath, filelistspath,
                                       self._baseurl)
            loader.setChannel(self)
            self._loaders.append(loader)
            if "updateinfo" in info:
                # updateinfo is optional: a failure only degrades errata
                # information, it does not abort the channel fetch.
                if uiitem.getStatus() == SUCCEEDED:
                    localpath = uiitem.getTargetPath()
                    errata = RPMUpdateInfo(localpath)
                    errata.load()
                    errata.setErrataFlags()
                else:
                    iface.warning(_("Failed to download. You must fetch channel "
                        "information to acquire needed update information.\n"
                        "%s: %s") % (uiitem.getURL(), uiitem.getFailedReason()))
        elif (item.getStatus() == SUCCEEDED and
              flitem.getStatus() == FAILED and
              fetcher.getCaching() is ALWAYS):
            iface.warning(_("Failed to download. You must fetch channel "
                            "information to acquire needed filelists.\n"
                            "%s: %s") % (flitem.getURL(),
                            flitem.getFailedReason()))
            return False
        elif fetcher.getCaching() is NEVER:
            # Report whichever of the two mandatory items failed.
            if item.getStatus() == FAILED:
                faileditem = item
            else:
                faileditem = flitem
            lines = [_("Failed acquiring information for '%s':") % self,
                     u"%s: %s" % (faileditem.getURL(),
                                  faileditem.getFailedReason())]
            raise Error, "\n".join(lines)
        else:
            return False

        uncompressor = fetcher.getUncompressor()

        # delete any old files, if the new ones have new names
        for datatype in ["primary", "filelists", "other",
                         "primary_lzma", "filelists_lzma", "other_lzma"]:
            if datatype in oldinfo:
                url = oldinfo[datatype]["url"]
                # BUGFIX: the new metadata may not offer this type at all
                # (e.g. the repository switched between plain and lzma
                # variants); info[datatype] raised KeyError in that case.
                # A missing type counts as a renamed file and is cleaned.
                newurl = info.get(datatype, {}).get("url")
                if url and url != newurl:
                    path = self.getLocalPath(fetcher, url)
                    if os.path.exists(path):
                        os.unlink(path)
                    # Also drop the uncompressed companion file.
                    handler = uncompressor.getHandler(path)
                    path = handler.getTargetPath(path)
                    if os.path.exists(path):
                        os.unlink(path)

        # Only commit the new digest after everything loaded successfully.
        self._digest = digest

        return True