Example #1
    def fetch(self, fetcher, progress):
        
        fetcher.reset()
        repomd = posixpath.join(self._baseurl, "repodata/repomd.xml")
        item = fetcher.enqueue(repomd)
        fetcher.run(progress=progress)

        if item.getStatus() is FAILED:
            progress.add(self.getFetchSteps()-1)
            if fetcher.getCaching() is NEVER:
                lines = [_("Failed acquiring release file for '%s':") % self,
                         u"%s: %s" % (item.getURL(), item.getFailedReason())]
                raise Error, "\n".join(lines)
            return False

        digest = getFileDigest(item.getTargetPath())
        if digest == self._digest:
            progress.add(1)
            return True
        self.removeLoaders()

        info = {}
        try:
            root = ElementTree.parse(item.getTargetPath()).getroot()
        except expat.error, e:
            raise Error, _("Invalid XML file:\n  %s\n  %s\n  %s") % \
                          (item.getTargetPath(), repomd, str(e))
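
All of the fetch() variants in this listing follow the same contract: enqueue one or more URLs, run the fetcher, compare a digest of the fetched file against self._digest to detect an unchanged channel, and otherwise rebuild the loaders. Reduced to a skeleton (hypothetical names, not code from any one example):

    def fetch(self, fetcher, progress):
        fetcher.reset()
        item = fetcher.enqueue(self._metadataurl)   # hypothetical URL attribute
        fetcher.run(progress=progress)

        if item.getStatus() is FAILED:
            if fetcher.getCaching() is NEVER:
                raise Error, u"%s: %s" % (item.getURL(),
                                          item.getFailedReason())
            return False                # rely on cached data, if any

        digest = getFileDigest(item.getTargetPath())
        if digest == self._digest:
            return True                 # channel unchanged since last fetch

        self.removeLoaders()
        loader = SomeLoader(item.getTargetPath())   # hypothetical loader class
        loader.setChannel(self)
        self._loaders.append(loader)
        self._digest = digest
        return True
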
Example #2
    def fetch(self, fetcher, progress):

        fetcher.reset()

        # Fetch packages file
        url = posixpath.join(self._baseurl, "PACKAGES.TXT")
        item = fetcher.enqueue(url)
        fetcher.run(progress=progress)
        if item.getStatus() == SUCCEEDED:
            localpath = item.getTargetPath()
            digest = getFileDigest(localpath)
            if digest == self._digest:
                return True
            self.removeLoaders()
            loader = SlackSiteLoader(localpath, self._baseurl)
            loader.setChannel(self)
            self._loaders.append(loader)
        elif fetcher.getCaching() is NEVER:
            lines = [
                _("Failed acquiring information for '%s':") % self,
                u"%s: %s" % (item.getURL(), item.getFailedReason())
            ]
            raise Error, "\n".join(lines)
        else:
            return False

        self._digest = digest

        return True
Example #3
    def fetch(self, fetcher, progress):

        pkginfourl = self._packageinfourl
        if not pkginfourl:
            pkginfourl = posixpath.join(self._baseurl, "packageinfo.xml.gz")

        fetcher.reset()
        item = fetcher.enqueue(pkginfourl, uncomp=True)
        fetcher.run(progress=progress)

        if item.getStatus() == SUCCEEDED:
            localpath = item.getTargetPath()

            digest = getFileDigest(localpath)
            if digest == self._digest:
                return True
            self.removeLoaders()

            loader = RPMRedCarpetLoader(localpath, self._baseurl)
            loader.setChannel(self)
            self._loaders.append(loader)
        elif fetcher.getCaching() is NEVER:
            lines = [
                _("Failed acquiring information for '%s':") % self,
                u"%s: %s" % (item.getURL(), item.getFailedReason())
            ]
            raise Error, "\n".join(lines)
        else:
            return False

        self._digest = digest

        return True
Example #4
 def fetch(self, fetcher, progress):
     getTS() # Make sure the db exists.
     dbdir = rpm_join_dbpath(sysconf.get("rpm-root", "/"),
                             sysconf.get("rpm-dbpath", "var/lib/rpm"))
     path = os.path.join(dbdir, "Packages")
     digest = getFileDigest(path)
     if digest == self._digest:
         return True
     self.removeLoaders()
     loader = RPMDBLoader()
     loader.setChannel(self)
     self._loaders.append(loader)
     self._digest = digest
     return True
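
This variant never touches the network: fetching the local RPM database channel is just a freshness check on the Packages file. rpm_join_dbpath is not shown in the listing; assuming it only has to cope with dbpaths given as absolute paths, a minimal sketch would be:

 import os

 def rpm_join_dbpath(root, dbpath):
     # A sketch: os.path.join() discards "root" when "dbpath" is
     # absolute, so strip the leading slash before joining.
     return os.path.join(root, dbpath.lstrip("/"))
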
Example #5
 def fetch(self, fetcher, progress):
     fetcher.reset()
     info = {}  # e.g. {'uncomp': True} to uncompress after download
     item = fetcher.enqueue(self._baseurl, **info)
     fetcher.run(progress=progress)
     if item.getStatus() == SUCCEEDED:
         localpath = item.getTargetPath()
         digest = getFileDigest(localpath)
         if digest == self._digest:
             return True
     
         self.removeLoaders()
         loader = PacManXMLLoader(localpath)
         loader.setChannel(self)
         self._loaders.append(loader)
     else:
         return False
     self._digest = digest
     return True
Example #6
    def fetch(self, fetcher, progress):

        fetcher.reset()

        # Fetch packages file
        url = posixpath.join(self._baseurl, self._dbfile)
        item = fetcher.enqueue(url, uncomp=True)
        flurl = posixpath.join(self._baseurl, self._flfile)
        flitem = fetcher.enqueue(flurl, uncomp=True)
        fetcher.run(progress=progress)
        if item.getStatus() == SUCCEEDED:
            localpath = item.getTargetPath()
            digest = getFileDigest(localpath)
            if digest == self._digest:
                return True
            if flitem.getStatus() == SUCCEEDED:
                filespath = flitem.getTargetPath()
            else:
                iface.warning(_("Failed to download. You must fetch channel "
                                "information to acquire needed filelists.\n"
                                "%s: %s") % (flitem.getURL(),
                                flitem.getFailedReason()))
                filespath = None
            self.removeLoaders()
            loader = ArchSiteLoader(localpath, filespath, self._baseurl)
            loader.setChannel(self)
            self._loaders.append(loader)
        elif fetcher.getCaching() is NEVER:
            lines = [_("Failed acquiring information for '%s':") % self,
                     u"%s: %s" % (item.getURL(), item.getFailedReason())]
            raise Error, "\n".join(lines)
        else:
            return False

        self._digest = digest

        return True
Example #7
    def fetch(self, fetcher, progress):

        fetcher.reset()

        # Fetch release file
        item = fetcher.enqueue(posixpath.join(self._baseurl, "base/release"))
        fetcher.run(progress=progress)
        failed = item.getFailedReason()
        if failed:
            progress.add(self.getFetchSteps()-1)
            progress.show()
            if fetcher.getCaching() is NEVER:
                lines = [_("Failed acquiring information for '%s':") % self,
                         "%s: %s" % (item.getURL(), failed)]
                raise Error, "\n".join(lines)
            return False

        digest = getFileDigest(item.getTargetPath())
        if digest == self._digest:
            progress.add(self.getFetchSteps()-1)
            progress.show()
            return True
        self.removeLoaders()

        # Parse release file
        md5sum = {}
        insidemd5sum = False
        hassignature = False
        for line in open(item.getTargetPath()):
            if line.startswith("-----BEGIN"):
                hassignature = True
                break
            elif not insidemd5sum:
                if line.startswith("MD5Sum:"):
                    insidemd5sum = True
            elif not line.startswith(" "):
                insidemd5sum = False
            else:
                try:
                    md5, size, path = line.split()
                except ValueError:
                    pass
                else:
                    md5sum[path] = (md5, int(size))

        if self._fingerprint:
            rfd, rname = tempfile.mkstemp()
            sfd, sname = tempfile.mkstemp()
            rfile = os.fdopen(rfd, "w")
            sfile = os.fdopen(sfd, "w")
            try:
                if not hassignature:
                    raise Error, _("Channel '%s' has fingerprint but is not "
                                   "signed") % self

                file = rfile
                for line in open(item.getTargetPath()):
                    if line.startswith("-----BEGIN"):
                        file = sfile
                    file.write(line)
                rfile.close()
                sfile.close()

                status, output = commands.getstatusoutput(
                    "gpg --batch --no-secmem-warning --status-fd 1 "
                    "--verify %s %s" % (sname, rname))

                badsig = False
                goodsig = False
                validsig = None
                for line in output.splitlines():
                    if line.startswith("[GNUPG:]"):
                        tokens = line[8:].split()
                        first = tokens[0]
                        if first == "VALIDSIG":
                            validsig = tokens[1]
                        elif first == "GOODSIG":
                            goodsig = True
                        elif first == "BADSIG":
                            badsig = True
                if badsig:
                    raise Error, _("Channel '%s' has bad signature") % self
                if not goodsig or validsig != self._fingerprint:
                    raise Error, _("Channel '%s' signed with unknown key")%self
            except Error, e:
                progress.add(self.getFetchSteps()-1)
                progress.show()
                rfile.close()
                sfile.close()
                os.unlink(rname)
                os.unlink(sname)
                if fetcher.getCaching() is NEVER:
                    raise
                else:
                    return False
            else:
                os.unlink(rname)
                os.unlink(sname)
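
The [GNUPG:] status parsing above is repeated verbatim in several of these fetch() implementations; a hypothetical helper (not part of the original code) could factor it out:

def parse_gpg_status(output):
    # Scan "gpg --status-fd 1" output and return the signature verdict
    # as (goodsig, badsig, validsig), where validsig is the fingerprint
    # reported by a VALIDSIG line, or None.
    badsig = False
    goodsig = False
    validsig = None
    for line in output.splitlines():
        if line.startswith("[GNUPG:]"):
            tokens = line[8:].split()
            if tokens:
                first = tokens[0]
                if first == "VALIDSIG":
                    validsig = tokens[1]
                elif first == "GOODSIG":
                    goodsig = True
                elif first == "BADSIG":
                    badsig = True
    return goodsig, badsig, validsig
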
Example #8
    def fetch(self, fetcher, progress):
        
        fetcher.reset()

        if self._mirrorlist:
            mirrorlist = self._mirrorlist
            item = fetcher.enqueue(mirrorlist)
            fetcher.run(progress=progress)

            if item.getStatus() is FAILED:
                progress.add(self.getFetchSteps()-1)
                if fetcher.getCaching() is NEVER:
                    iface.warning(_("Could not load mirror list. Continuing with base URL only."))
            else:
                self.loadMirrors(item.getTargetPath())

            fetcher.reset()
        else:
            progress.add(1)

        repomd = posixpath.join(self._baseurl, "repodata/repomd.xml")
        reposig = posixpath.join(self._baseurl, "repodata/repomd.xml.asc")

        oldinfo = {}
        path = self.getLocalPath(fetcher, repomd)
        if os.path.exists(path):
            try:
                oldinfo = self.loadMetadata(path)
            except Error:
                pass
        
        item = fetcher.enqueue(repomd)
        if self._fingerprint:
            gpgitem = fetcher.enqueue(reposig)
        fetcher.run(progress=progress)

        if item.getStatus() is FAILED:
            progress.add(self.getFetchSteps()-1)
            if fetcher.getCaching() is NEVER:
                lines = [_("Failed acquiring release file for '%s':") % self,
                         u"%s: %s" % (item.getURL(), item.getFailedReason())]
                raise Error, "\n".join(lines)
            return False

        if self._fingerprint:
            if gpgitem.getStatus() is FAILED:
                raise Error, \
                      _("Download of repomd.xml.asc failed for secure "
                        "channel '%s': %s") % (self, gpgitem.getFailedReason())

            status, output = commands.getstatusoutput(
                "gpg --batch --no-secmem-warning --status-fd 1 --verify "
                "%s %s" % (gpgitem.getTargetPath(), item.getTargetPath()))

            badsig = False
            goodsig = False
            validsig = None
            for line in output.splitlines():
                if line.startswith("[GNUPG:]"):
                    tokens = line[8:].split()
                    first = tokens[0]
                    if first == "VALIDSIG":
                        validsig = tokens[1]
                    elif first == "GOODSIG":
                        goodsig = True
                    elif first == "BADSIG":
                        badsig = True
            if badsig:
                raise Error, _("Channel '%s' has bad signature") % self
            if (not goodsig or
                (self._fingerprint and validsig != self._fingerprint)):
                raise Error, _("Channel '%s' signed with unknown key") % self

        digest = getFileDigest(item.getTargetPath())
        if digest == self._digest:
            progress.add(1)
            return True
        self.removeLoaders()

        info = self.loadMetadata(item.getTargetPath())

        if "primary" not in info and "primary_lzma" not in info:
            raise Error, _("Primary information not found in repository "
                           "metadata for '%s'") % self

        if "primary_lzma" in info:
            primary = info["primary_lzma"]
        else:
            primary = info["primary"]
        if "filelists_lzma" in info:
            filelists = info["filelists_lzma"]
        else:
            filelists = info["filelists"]

        fetcher.reset()
        item = fetcher.enqueue(primary["url"],
                               md5=primary.get("md5"),
                               uncomp_md5=primary.get("uncomp_md5"),
                               sha=primary.get("sha"),
                               uncomp_sha=primary.get("uncomp_sha"),
                               sha256=primary.get("sha256"),
                               uncomp_sha256=primary.get("uncomp_sha256"),
                               uncomp=True)
        flitem = fetcher.enqueue(filelists["url"],
                                 md5=filelists.get("md5"),
                                 uncomp_md5=filelists.get("uncomp_md5"),
                                 sha=filelists.get("sha"),
                                 uncomp_sha=filelists.get("uncomp_sha"),
                                 sha256=filelists.get("sha256"),
                                 uncomp_sha256=filelists.get("uncomp_sha256"),
                                 uncomp=True)
        if "updateinfo" in info:
            uiitem = fetcher.enqueue(info["updateinfo"]["url"],
                                   md5=info["updateinfo"].get("md5"),
                                   uncomp_md5=info["updateinfo"].get("uncomp_md5"),
                                   sha=info["updateinfo"].get("sha"),
                                   uncomp_sha=info["updateinfo"].get("uncomp_sha"),
                                   uncomp=True)
        fetcher.run(progress=progress)
 
        if item.getStatus() == SUCCEEDED and flitem.getStatus() == SUCCEEDED:
            localpath = item.getTargetPath()
            filelistspath = flitem.getTargetPath()
            loader = RPMMetaDataLoader(localpath, filelistspath,
                                       self._baseurl)
            loader.setChannel(self)
            self._loaders.append(loader)
            if "updateinfo" in info:
                if uiitem.getStatus() == SUCCEEDED:
                    localpath = uiitem.getTargetPath()
                    errata = RPMUpdateInfo(localpath)
                    errata.load()
                    errata.setErrataFlags()
                else:
                    iface.warning(_("Failed to download. You must fetch channel "
                        "information to acquire needed update information.\n"
                        "%s: %s") % (uiitem.getURL(), uiitem.getFailedReason()))
        elif (item.getStatus() == SUCCEEDED and
              flitem.getStatus() == FAILED and
              fetcher.getCaching() is ALWAYS):
            iface.warning(_("Failed to download. You must fetch channel "
                            "information to acquire needed filelists.\n"
                            "%s: %s") % (flitem.getURL(),
                            flitem.getFailedReason()))
            return False
        elif fetcher.getCaching() is NEVER:
            if item.getStatus() == FAILED:
                faileditem = item
            else:
                faileditem = flitem
            lines = [_("Failed acquiring information for '%s':") % self,
                       u"%s: %s" % (faileditem.getURL(),
                       faileditem.getFailedReason())]
            raise Error, "\n".join(lines)
        else:
            return False

        uncompressor = fetcher.getUncompressor()

        # delete any old files, if the new ones have new names
        for type in ["primary", "filelists", "other", 
                     "primary_lzma", "filelists_lzma", "other_lzma"]:
            if type in oldinfo:
                url = oldinfo[type]["url"]
                # Guard with .get(): the new metadata may no longer
                # provide this data type at all.
                if url and url != info.get(type, {}).get("url"):
                    path = self.getLocalPath(fetcher, url)
                    if os.path.exists(path):
                        os.unlink(path)
                    handler = uncompressor.getHandler(path)
                    path = handler.getTargetPath(path)
                    if os.path.exists(path):
                        os.unlink(path)

        self._digest = digest

        return True
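
loadMetadata() is not shown in any of these examples, but the lookups above imply it returns a dict keyed by repomd data type. A plausible shape, with purely illustrative values:

# Hypothetical result of self.loadMetadata(path), inferred from the
# lookups above; every key and value here is illustrative only.
info = {
    "primary": {
        "url": "repodata/primary.xml.gz",
        "md5": None, "uncomp_md5": None,
        "sha": None, "uncomp_sha": None,
        "sha256": None, "uncomp_sha256": None,
    },
    "filelists": {"url": "repodata/filelists.xml.gz"},
    # "primary_lzma"/"filelists_lzma" appear instead when the repository
    # ships LZMA-compressed metadata; "updateinfo" only when errata exist.
}
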
Example #9
def main(ctrl, opts):

    if opts.add:
        if len(opts.add) == 1:
            opts.add = read_mirrors(ctrl, opts.add[0])
        if len(opts.add) % 2 != 0:
            raise Error, _("Invalid arguments for --add")
        for i in range(0,len(opts.add),2):
            origin, mirror = opts.add[i:i+2]
            if mirror:
                sysconf.add(("mirrors", origin), mirror, unique=True)

    if opts.remove:
        if len(opts.remove) == 1:
            opts.remove = read_mirrors(ctrl, opts.remove[0])
        if len(opts.remove) % 2 != 0:
            raise Error, _("Invalid arguments for --remove")
        for i in range(0,len(opts.remove),2):
            origin, mirror = opts.remove[i:i+2]
            if not sysconf.has(("mirrors", origin)):
                iface.warning(_("Origin not found: %s") % origin)
            if not sysconf.remove(("mirrors", origin), mirror):
                iface.warning(_("Mirror not found: %s") % mirror)

    if opts.remove_all:
        for origin in opts.remove_all:
            if not sysconf.remove(("mirrors", origin)):
                iface.warning(_("Origin not found: %s") % origin)

    if opts.sync:
        reset = {}
        lst = read_mirrors(ctrl, opts.sync)
        for i in range(0,len(lst),2):
            origin, mirror = lst[i:i+2]
            if origin not in reset:
                reset[origin] = True
                sysconf.remove(("mirrors", origin))
            if mirror:
                sysconf.add(("mirrors", origin), mirror, unique=True)

    if opts.clear_history is not None:
        if opts.clear_history:
            history = sysconf.get("mirrors-history", [])
            history[:] = [x for x in history if x[0] not in opts.clear_history]
            sysconf.set("mirrors-history", history)
        else:
            history = sysconf.remove("mirrors-history")

    if opts.show:
        mirrors = sysconf.get("mirrors", ())
        for origin in mirrors:
            print origin
            for mirror in mirrors[origin]:
                print "   ", mirror
            print

    if opts.edit:
        sysconf.assertWritable()
        
        fd, name = tempfile.mkstemp(".txt")
        file = os.fdopen(fd, "w")
        print >>file, EDITEXAMPLE
        origins = sysconf.keys("mirrors")
        origins.sort()
        for origin in origins:
            print >>file, origin
            mirrors = sysconf.get(("mirrors", origin))
            for mirror in mirrors:
                print >>file, "   ", mirror
            print >>file
        file.close()
        editor = os.environ.get("EDITOR", "vi")
        olddigest = getFileDigest(name)
        while True:
            os.system("%s %s" % (editor, name))
            newdigest = getFileDigest(name)
            if newdigest == olddigest:
                break
            try:
                lst = read_mirrors(ctrl, name)
            except Error, e:
                iface.error(unicode(e))
                if not iface.askYesNo(_("Continue?"), True):
                    break
                else:
                    continue
            newmirrors = {}
            for i in range(0,len(lst),2):
                origin, mirror = lst[i:i+2]
                newmirrors.setdefault(origin, []).append(mirror)
            sysconf.set("mirrors", newmirrors)
            break
        os.unlink(name)
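
read_mirrors() is not shown, but it evidently parses the same layout the --edit branch writes: an unindented origin URL followed by indented mirror URLs. A hypothetical reimplementation consistent with that round trip (the real parser may also fetch remote files through ctrl):

def read_mirrors(ctrl, path):
    # Sketch: parse "origin" lines with indented "mirror" lines back
    # into the flat [origin, mirror, origin, mirror, ...] list that
    # main() walks in steps of two.  Origins without mirrors yield an
    # empty mirror entry, matching the "if mirror:" guards above.
    result = []
    origin = None
    pending = False                # origin seen, no mirror yet
    for line in open(path):
        stripped = line.strip()
        if not stripped or stripped.startswith("#"):
            continue
        if line[0].isspace():      # indented: a mirror of the origin
            result.extend([origin, stripped])
            pending = False
        else:                      # unindented: a new origin
            if pending:
                result.extend([origin, ""])
            origin = stripped
            pending = True
    if pending:
        result.extend([origin, ""])
    return result
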
Example #10
    def fetch(self, fetcher, progress):

        fetcher.reset()

        # Fetch release file
        item = fetcher.enqueue(self._getURL("Release"))
        gpgitem = fetcher.enqueue(self._getURL("Release.gpg"))
        fetcher.run(progress=progress)
        failed = item.getFailedReason()
        if failed:
            progress.add(self.getFetchSteps() - 2)
            progress.show()
            if fetcher.getCaching() is NEVER:
                lines = [
                    _("Failed acquiring information for '%s':") % self,
                    u"%s: %s" % (item.getURL(), failed)
                ]
                raise Error, "\n".join(lines)
            return False

        digest = getFileDigest(item.getTargetPath())
        if digest == self._digest:
            progress.add(self.getFetchSteps() - 2)
            progress.show()
            return True
        self.removeLoaders()

        # Parse release file
        md5sum = {}
        insidemd5sum = False
        for line in open(item.getTargetPath()):
            if not insidemd5sum:
                if line.startswith("MD5Sum:"):
                    insidemd5sum = True
            elif not line.startswith(" "):
                insidemd5sum = False
            else:
                try:
                    md5, size, path = line.split()
                except ValueError:
                    pass
                else:
                    md5sum[path] = (md5, int(size))

        if self._fingerprint:
            try:
                failed = gpgitem.getFailedReason()
                if failed:
                    raise Error, _(
                        "Channel '%s' has fingerprint but download "
                        "of Release.gpg failed: %s") % (self, failed)

                status, output = commands.getstatusoutput(
                    "gpg --batch --no-secmem-warning --status-fd 1 "
                    "--verify %s %s" %
                    (gpgitem.getTargetPath(), item.getTargetPath()))

                badsig = False
                goodsig = False
                validsig = None
                for line in output.splitlines():
                    if line.startswith("[GNUPG:]"):
                        tokens = line[8:].split()
                        first = tokens[0]
                        if first == "VALIDSIG":
                            validsig = tokens[1]
                        elif first == "GOODSIG":
                            goodsig = True
                        elif first == "BADSIG":
                            badsig = True
                if badsig:
                    raise Error, _("Channel '%s' has bad signature") % self
                if not goodsig or validsig != self._fingerprint:
                    raise Error, _("Channel '%s' signed with unknown key") \
                                 % self
            except Error, e:
                progress.add(self.getFetchSteps() - 2)
                progress.show()
                if fetcher.getCaching() is NEVER:
                    raise
                else:
                    return False
Example #11
File: yast2.py (project: Kampi/Zybo-Linux)
    def fetch(self, fetcher, progress):

        # Fetch media information file
        # This file contains the timestamp info
        # that says if the repository has changed
        fetchitem = posixpath.join(self._baseurl, "media.1/media")
        fetched = self.__fetchFile(fetchitem, fetcher, progress)
        if fetched.getStatus() == FAILED:
            return False

        digest = getFileDigest(fetched.getTargetPath())
        #if digest == self._digest and getattr(self, "force-yast", False):
        if digest == self._digest:
            return True

        # Find location of description files
        fetchitem = posixpath.join(self._baseurl, "content")
        fetched = self.__fetchFile(fetchitem, fetcher, progress)
        if fetched.getStatus() == FAILED:
            return False

        descrdir = "suse/setup/descr"
        datadir = "RPMS"
        uncompress = self._compressed
        for line in open(fetched.getTargetPath()):
            line = line.strip()
            try:
                key, rest = line.split(None, 1)
            except ValueError:
                continue

            if key == "DESCRDIR":
                descrdir = rest
            elif key == "DATADIR":
                datadir = rest
            elif key == "META":
                # Autodetect compressed/uncompressed SuSEtags metadata.
                if rest.endswith("packages"):
                    uncompress = False
                elif rest.endswith("packages.gz"):
                    uncompress = True

        # Fetch package information (req, dep, prov, etc)
        fetchitem = posixpath.join(self._baseurl, "%s/packages" % descrdir)
        if uncompress:
            fetchitem += ".gz"
        fetched = self.__fetchFile(fetchitem, fetcher, progress, uncompress)
        if fetched.getStatus() == FAILED:
            return False

        self.removeLoaders()

        pkginfofile = fetched.getTargetPath()
        header = open(pkginfofile).readline().strip()
        if header == "=Ver: 2.0":
            fetchitem = posixpath.join(self._baseurl,
                                       "%s/packages.en" % descrdir)
            if uncompress:
                fetchitem += ".gz"

            fetched = self.__fetchFile(fetchitem, fetcher,
                                       progress, uncompress)

            if (fetched.getStatus() == FAILED or
                open(fetched.getTargetPath()).readline().strip() != "=Ver: 2.0"):
                raise Error, "YaST2 package descriptions not loaded."
            else:
                pkgdescfile = fetched.getTargetPath()
                loader = YaST2Loader(self._baseurl, datadir,
                                     pkginfofile, pkgdescfile)

            loader.setChannel(self)
            self._loaders.append(loader)
        else:
            raise Error, _("Invalid package file header (%s)" % header)

        self._digest = digest

        return True
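
For reference, the content file parsed above is a plain key/value listing; the keys this loop consumes look roughly like this (paths and checksum illustrative):

    DESCRDIR  suse/setup/descr
    DATADIR   suse
    META      SHA1 0d1e2f3a4b5c packages.gz

The META line is what drives the compressed/uncompressed autodetection: a rest field ending in "packages.gz" selects the gzipped SuSEtags metadata.
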
Example #12
    def fetch(self, fetcher, progress):

        fetcher.reset()

        if self._mirrorurl:
            mirrorlist = self._mirrorurl
            item = fetcher.enqueue(mirrorlist)
            fetcher.run(progress=progress)

            if item.getStatus() is FAILED:
                progress.add(self.getFetchSteps()-1)
                if fetcher.getCaching() is NEVER:
                    iface.warning(_("Could not load mirror list. Continuing with base URL only."))
            else:
                self._mirrors.clear()
                mirrorurls = []
                mirrors = self.loadMirrors(item.getTargetPath())
                for mirror in mirrors:
                    scheme = mirror["url"].split(":")[0]
                    if not fetcher.getHandler(scheme, None):
                        continue
                    if mirror["type"] != "distrib":
                        continue
                    mirrorurls.append(mirror["url"])
                if mirrorurls:
                    self._mirrors[self._baseurl] = mirrorurls

            fetcher.reset()
        else:
            progress.add(1)

        self._compareurl = self._hdlurl

        hdlbaseurl, basename = os.path.split(self._hdlurl)
        infoname = os.path.split(self._infourl)[1]

        md5url = posixpath.join(hdlbaseurl, "MD5SUM")
        item = fetcher.enqueue(md5url)
        fetcher.run(progress=progress)
        hdlmd5 = None
        infomd5 = None
        failed = item.getFailedReason()
        if not failed:
            self._compareurl = md5url
            digest = getFileDigest(item.getTargetPath())
            if digest == self._digest:
                progress.add(3)
                return True

            basename = posixpath.basename(self._hdlurl)
            infoname = posixpath.basename(self._infourl)
            try:
                for line in open(item.getTargetPath()):
                    line = line.strip()
                    if line:
                        md5, name = line.split()
                        if name == basename:
                            hdlmd5 = md5
                        if name == infoname:
                            infomd5 = md5
            except ValueError:
                pass

        fetcher.reset()
        hdlitem = fetcher.enqueue(self._hdlurl, md5=hdlmd5, uncomp=True)
        if infomd5:
            infoitem = fetcher.enqueue(self._infourl, md5=infomd5, uncomp=True)
        else:
            progress.add(1) 
            infoitem = None

        # do not get "descriptions" on non "update" media
        if self.getName().find("Updates") == -1:
            progress.add(1)
            descitem = None
        else:
            descurl = posixpath.join(hdlbaseurl, "descriptions")
            descitem = fetcher.enqueue(descurl)

        fetcher.run(progress=progress)

        if hdlitem.getStatus() == FAILED:
            hdfailed = hdlitem.getFailedReason()
            if fetcher.getCaching() is NEVER:
                # Try reading reconfig.urpmi (should give new path)
                fetcher.reset()
                reconfigurl = posixpath.join(hdlbaseurl, "reconfig.urpmi")
                reconfigitem = fetcher.enqueue(reconfigurl)
                fetcher.run(progress=progress)
                if reconfigitem.getStatus() == FAILED:
                    refailed = reconfigitem.getFailedReason()
                    if fetcher.getCaching() is NEVER:
                        lines = [_("Failed acquiring information for '%s':") % self,
                            u"%s: %s" % (hdlitem.getURL(), hdfailed),
                            u"%s: %s" % (reconfigitem.getURL(), refailed)]
                        raise Error, "\n".join(lines)
                    return False
                else:
                    # Need to inject "/" at the end to avoid buggy urls
                    if not hdlbaseurl.endswith("/"):
                        hdlbaseurl += "/"
                    for line in open(reconfigitem.getTargetPath()):
                        # Skip comments and blank lines (a blank line
                        # would otherwise crash the split() below).
                        if line.startswith("#") or not line.strip():
                            continue
                        splitline = line.split()
                        arch = os.uname()[4]
                        if arch == "i686":
                            arch = "i586"
                        reconfarch = re.sub(r"\$ARCH", arch, splitline[1])
                        reconfpath = re.sub(splitline[0] + "$", reconfarch,
                                            hdlbaseurl)
                        sysconf.set(("channels", self.getAlias(), "baseurl"),
                                    reconfpath)
                        self._hdlurl = os.path.join(reconfpath, basename)
                        self._infourl = os.path.join(reconfpath, infoname)
                    return self.fetch(fetcher, progress)
            return False
        else:
            localpath = hdlitem.getTargetPath()
            digestpath = None
            infopath = None
            listpath = None
            if self._compareurl == self._hdlurl:
                digestpath = localpath
            if digestpath:
                digest = getFileDigest(digestpath)
                if digest == self._digest:
                    return True
            self.removeLoaders()
            if localpath.endswith(".cz"):
                if (not os.path.isfile(localpath[:-3]) or
                    fetcher.getCaching() != ALWAYS):
                    linkpath = fetcher.getLocalPath(hdlitem)
                    
                    # Detect old (gzip) vs. new (xz) .cz container format.
                    ms = magic.open(magic.NONE)
                    ms.load()
                    ext = ms.file(linkpath).split(' ')[0]
                    ms.close()

                    if ext == "gzip":
                        linkpath = linkpath[:-2]+"gz"
                    else:
                        linkpath = linkpath[:-2]+"xz"
                    if not os.access(os.path.dirname(linkpath), os.W_OK):
                        dirname = os.path.join(sysconf.get("user-data-dir"),
                                               "channels")
                        basename = os.path.basename(linkpath)
                        if not os.path.isdir(dirname):
                            os.makedirs(dirname)
                        linkpath = os.path.join(dirname, basename)
                    if os.path.isfile(linkpath):
                        os.unlink(linkpath)
                    os.symlink(localpath, linkpath)
                    localpath = linkpath
                    uncompressor = fetcher.getUncompressor()
                    uncomphandler = uncompressor.getHandler(linkpath)
                    try:
                        uncomphandler.uncompress(linkpath)
                    except Error, e:
                        # cz file has trailing information which breaks
                        # current gzip module logic.
                        if "Not a gzipped file" not in e[0]:
                            os.unlink(linkpath)
                            raise
                    os.unlink(linkpath)
                localpath = localpath[:-3]
            if infoitem and infoitem.getStatus() == SUCCEEDED:
                infopath = infoitem.getTargetPath()
            elif infoitem and infoitem.getStatus() == FAILED:
                lines = [_("Failed acquiring information for '%s':") % self,
                    u"%s: %s" % (infoitem.getURL(), infoitem.getFailedReason())]
                raise Warning, "\n".join(lines)

            flagdict = {}
            if descitem and descitem.getStatus() == SUCCEEDED:
                descpath = descitem.getTargetPath()
                errata = RPMDescriptions(descpath)
                errata.load()
                #errata.setErrataFlags() <-- done in loader
                flagdict = errata.getErrataFlags()
            
            baseurl = self._baseurl
            directory = self._directory
            if directory:
                baseurl += "/" + directory + "/"
            if open(localpath).read(4) == "\x8e\xad\xe8\x01":
                loader = URPMILoader(localpath, baseurl, listpath)
            else:
                loader = URPMISynthesisLoader(localpath, baseurl, listpath, infopath)
            # need to set flags while loading
            loader.setErrataFlags(flagdict)
                                
            loader.setChannel(self)
            self._loaders.append(loader)
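
reconfig.urpmi, read in the failure path above, maps an old URL suffix to a replacement, with $ARCH substituted by the machine architecture before the rewrite. A hypothetical line consistent with the parsing (whitespace-separated old suffix, then new suffix):

    media/main  media/main/$ARCH

The old suffix is matched against the end of the current base URL, so only channels still pointing at the retired layout are rewritten.
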
Example #13
    def fetch(self, fetcher, progress):

        fetcher.reset()

        if self._mirrorurl:
            mirrorlist = self._mirrorurl
            item = fetcher.enqueue(mirrorlist)
            fetcher.run(progress=progress)

            if item.getStatus() is FAILED:
                progress.add(self.getFetchSteps()-1)
                if fetcher.getCaching() is NEVER:
                    iface.warning(_("Could not load mirror list. Continuing with base URL only."))
            else:
                self._mirrors.clear()
                mirrorurls = []
                mirrors = self.loadMirrors(item.getTargetPath())
                for mirror in mirrors:
                    scheme = mirror["url"].split(":")[0]
                    if not fetcher.getHandler(scheme, None):
                        continue
                    if mirror["type"] != "distrib":
                        continue
                    mirrorurls.append(mirror["url"])
                if mirrorurls:
                    self._mirrors[self._baseurl] = mirrorurls

            fetcher.reset()
        else:
            progress.add(1)

        self._compareurl = self._hdlurl

        hdlbaseurl, basename = os.path.split(self._hdlurl)
        infoname = os.path.split(self._infourl)[1]

        md5url = posixpath.join(hdlbaseurl, "MD5SUM")
        item = fetcher.enqueue(md5url)
        fetcher.run(progress=progress)
        hdlmd5 = None
        infomd5 = None
        failed = item.getFailedReason()
        if not failed:
            self._compareurl = md5url
            digest = getFileDigest(item.getTargetPath())
            if digest == self._digest:
                progress.add(3)
                return True

            basename = posixpath.basename(self._hdlurl)
            infoname = posixpath.basename(self._infourl)
            try:
                for line in open(item.getTargetPath()):
                    line = line.strip()
                    if line:
                        md5, name = line.split()
                        if name == basename:
                            hdlmd5 = md5
                        if name == infoname:
                            infomd5 = md5
            except ValueError:
                pass

        fetcher.reset()
        hdlitem = fetcher.enqueue(self._hdlurl, md5=hdlmd5, uncomp=True)
        if infomd5:
            infoitem = fetcher.enqueue(self._infourl, md5=infomd5, uncomp=True)
        else:
            progress.add(1) 
            infoitem = None

        # do not get "descriptions" on non "update" media
        if self.getName().find("Updates") == -1:
            progress.add(1)
            descitem = None
        else:
            descurl = posixpath.join(hdlbaseurl, "descriptions")
            descitem = fetcher.enqueue(descurl)

        fetcher.run(progress=progress)

        if hdlitem.getStatus() == FAILED:
            hdfailed = hdlitem.getFailedReason()
            if fetcher.getCaching() is NEVER:
                # Try reading reconfig.urpmi (should give new path)
                fetcher.reset()
                reconfigurl = posixpath.join(hdlbaseurl, "reconfig.urpmi")
                reconfigitem = fetcher.enqueue(reconfigurl)
                fetcher.run(progress=progress)
                if reconfigitem.getStatus() == FAILED:
                    refailed = reconfigitem.getFailedReason()
                    if fetcher.getCaching() is NEVER:
                        lines = [_("Failed acquiring information for '%s':") % self,
                            u"%s: %s" % (hdlitem.getURL(), hdfailed),
                            u"%s: %s" % (reconfigitem.getURL(), refailed)]
                        raise Error, "\n".join(lines)
                    return False
                else:
                    # Need to inject "/" at the end to avoid buggy urls
                    if not hdlbaseurl.endswith("/"):
                        hdlbaseurl += "/"
                    for line in open(reconfigitem.getTargetPath()):
                        # Skip comments and blank lines (a blank line
                        # would otherwise crash the split() below).
                        if line.startswith("#") or not line.strip():
                            continue
                        splitline = line.split()
                        arch = os.uname()[4]
                        if arch == "i686":
                            arch = "i586"
                        reconfarch = re.sub(r"\$ARCH", arch, splitline[1])
                        reconfpath = re.sub(splitline[0] + "$", reconfarch,
                                            hdlbaseurl)
                        sysconf.set(("channels", self.getAlias(), "baseurl"),
                                    reconfpath)
                        self._hdlurl = os.path.join(reconfpath, basename)
                        self._infourl = os.path.join(reconfpath, infoname)
                    return self.fetch(fetcher, progress)
            return False
        else:
            localpath = hdlitem.getTargetPath()
            digestpath = None
            infopath = None
            listpath = None
            if self._compareurl == self._hdlurl:
                digestpath = localpath
            if digestpath:
                digest = getFileDigest(digestpath)
                if digest == self._digest:
                    return True
            self.removeLoaders()
            if localpath.endswith(".cz"):
                if (not os.path.isfile(localpath[:-3]) or
                    fetcher.getCaching() != ALWAYS):
                    linkpath = fetcher.getLocalPath(hdlitem)
                    linkpath = linkpath[:-2]+"gz"
                    if not os.access(os.path.dirname(linkpath), os.W_OK):
                        dirname = os.path.join(sysconf.get("user-data-dir"),
                                               "channels")
                        basename = os.path.basename(linkpath)
                        if not os.path.isdir(dirname):
                            os.makedirs(dirname)
                        linkpath = os.path.join(dirname, basename)
                    if os.path.isfile(linkpath):
                        os.unlink(linkpath)
                    os.symlink(localpath, linkpath)
                    localpath = linkpath
                    uncompressor = fetcher.getUncompressor()
                    uncomphandler = uncompressor.getHandler(linkpath)
                    try:
                        uncomphandler.uncompress(linkpath)
                    except Error, e:
                        # cz file has trailing information which breaks
                        # current gzip module logic.
                        if "Not a gzipped file" not in e[0]:
                            os.unlink(linkpath)
                            raise
                    os.unlink(linkpath)
                localpath = localpath[:-3]
            if infoitem and infoitem.getStatus() == SUCCEEDED:
                infopath = infoitem.getTargetPath()
            elif infoitem and infoitem.getStatus() == FAILED:
                lines = [_("Failed acquiring information for '%s':") % self,
                    u"%s: %s" % (infoitem.getURL(), infoitem.getFailedReason())]
                raise Warning, "\n".join(lines)

            flagdict = {}
            if descitem and descitem.getStatus() == SUCCEEDED:
                descpath = descitem.getTargetPath()
                errata = RPMDescriptions(descpath)
                errata.load()
                #errata.setErrataFlags() <-- done in loader
                flagdict = errata.getErrataFlags()
            
            baseurl = self._baseurl
            directory = self._directory
            if directory:
                baseurl += "/" + directory + "/"
            if open(localpath).read(4) == "\x8e\xad\xe8\x01":
                loader = URPMILoader(localpath, baseurl, listpath)
            else:
                loader = URPMISynthesisLoader(localpath, baseurl, listpath, infopath)
            # need to set flags while loading
            loader.setErrataFlags(flagdict)
                                
            loader.setChannel(self)
            self._loaders.append(loader)
Example #14
    def fetch(self, fetcher, progress):

        fetcher.reset()

        self._compareurl = self._hdlurl

        hdlbaseurl, basename = os.path.split(self._hdlurl)
        md5url = posixpath.join(hdlbaseurl, "MD5SUM")
        item = fetcher.enqueue(md5url)
        fetcher.run(progress=progress)
        hdlmd5 = None
        failed = item.getFailedReason()
        if not failed:
            self._compareurl = md5url
            digest = getFileDigest(item.getTargetPath())
            if digest == self._digest:
                progress.add(2)
                return True

            basename = posixpath.basename(self._hdlurl)
            for line in open(item.getTargetPath()):
                md5, name = line.split()
                if name == basename:
                    hdlmd5 = md5
                    break

        fetcher.reset()
        hdlitem = fetcher.enqueue(self._hdlurl, md5=hdlmd5, uncomp=True)

        if self._hdlurl.endswith("/list"):
            listitem = None
        else:
            m = re.compile(r"/(?:synthesis\.)?hdlist(.*)\.") \
                  .search(self._hdlurl)
            suffix = m and m.group(1) or ""
            listurl = posixpath.join(hdlbaseurl, "list%s" % suffix)
            listitem = fetcher.enqueue(listurl, uncomp=True)

        fetcher.run(progress=progress)

        if hdlitem.getStatus() == FAILED:
            failed = hdlitem.getFailedReason()
            if fetcher.getCaching() is NEVER:
                lines = [_("Failed acquiring information for '%s':") % self,
                         u"%s: %s" % (hdlitem.getURL(), failed)]
                raise Error, "\n".join(lines)
            return False
        else:
            localpath = hdlitem.getTargetPath()
            digestpath = None
            if listitem and listitem.getStatus() == SUCCEEDED:
                if self._compareurl == self._hdlurl:
                    self._compareurl = listurl
                    digestpath = localpath
                listpath = listitem.getTargetPath()
            else:
                listpath = None
                if self._compareurl == self._hdlurl:
                    digestpath = localpath
            if digestpath:
                digest = getFileDigest(digestpath)
                if digest == self._digest:
                    return True
            self.removeLoaders()
            if localpath.endswith(".cz"):
                if (not os.path.isfile(localpath[:-3]) or
                    fetcher.getCaching() != ALWAYS):
                    linkpath = fetcher.getLocalPath(hdlitem)
                    linkpath = linkpath[:-2]+"gz"
                    if not os.access(os.path.dirname(linkpath), os.W_OK):
                        dirname = os.path.join(sysconf.get("user-data-dir"),
                                               "channels")
                        basename = os.path.basename(linkpath)
                        if not os.path.isdir(dirname):
                            os.makedirs(dirname)
                        linkpath = os.path.join(dirname, basename)
                    if os.path.isfile(linkpath):
                        os.unlink(linkpath)
                    os.symlink(localpath, linkpath)
                    localpath = linkpath
                    uncompressor = fetcher.getUncompressor()
                    uncomphandler = uncompressor.getHandler(linkpath)
                    try:
                        uncomphandler.uncompress(linkpath)
                    except Error, e:
                        # cz file has trailing information which breaks
                        # current gzip module logic.
                        if "Not a gzipped file" not in e[0]:
                            os.unlink(linkpath)
                            raise
                    os.unlink(linkpath)
                localpath = localpath[:-3]

            if open(localpath).read(4) == "\x8e\xad\xe8\x01":
                loader = URPMILoader(localpath, self._baseurl, listpath)
            else:
                loader = URPMISynthesisLoader(localpath, self._baseurl, listpath)

            loader.setChannel(self)
            self._loaders.append(loader)
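
A small sketch of the format probe used above: "\x8e\xad\xe8\x01" is the magic that opens an RPM header section, so an uncompressed hdlist (a stream of raw package headers) starts with it, while synthesis files are plain text. A hypothetical helper expressing the same check:

RPM_HEADER_MAGIC = "\x8e\xad\xe8\x01"

def is_raw_hdlist(path):
    # True for a raw hdlist (RPM header stream), False for the
    # text-based synthesis format.
    return open(path).read(4) == RPM_HEADER_MAGIC
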
Example #15
def main(ctrl, opts):

    if opts.add:
        if len(opts.add) == 1:
            opts.add = read_mirrors(ctrl, opts.add[0])
        if len(opts.add) % 2 != 0:
            raise Error, _("Invalid arguments for --add")
        for i in range(0, len(opts.add), 2):
            origin, mirror = opts.add[i:i + 2]
            if mirror:
                sysconf.add(("mirrors", origin), mirror, unique=True)

    if opts.remove:
        if len(opts.remove) == 1:
            opts.remove = read_mirrors(ctrl, opts.remove[0])
        if len(opts.remove) % 2 != 0:
            raise Error, _("Invalid arguments for --remove")
        for i in range(0, len(opts.remove), 2):
            origin, mirror = opts.remove[i:i + 2]
            if not sysconf.has(("mirrors", origin)):
                iface.warning(_("Origin not found: %s") % origin)
            if not sysconf.remove(("mirrors", origin), mirror):
                iface.warning(_("Mirror not found: %s") % mirror)

    if opts.remove_all:
        for origin in opts.remove_all:
            if not sysconf.remove(("mirrors", origin)):
                iface.warning(_("Origin not found: %s") % origin)

    if opts.sync:
        reset = {}
        lst = read_mirrors(ctrl, opts.sync)
        for i in range(0, len(lst), 2):
            origin, mirror = lst[i:i + 2]
            if origin not in reset:
                reset[origin] = True
                sysconf.remove(("mirrors", origin))
            if mirror:
                sysconf.add(("mirrors", origin), mirror, unique=True)

    if opts.clear_history is not None:
        if opts.clear_history:
            history = sysconf.get("mirrors-history", [])
            history[:] = [x for x in history if x[0] not in opts.clear_history]
            sysconf.set("mirrors-history", history)
        else:
            history = sysconf.remove("mirrors-history")

    if opts.show:
        mirrors = sysconf.get("mirrors", ())
        for origin in mirrors:
            print origin
            for mirror in mirrors[origin]:
                print "   ", mirror
            print

    if opts.yaml:
        try:
            import yaml
        except ImportError:
            raise Error, _(
                "Please install PyYAML in order to use this function")
        yamlmirrors = {}
        mirrors = sysconf.get("mirrors", ())
        for origin in mirrors:
            yamlmirrors[origin] = mirrors[origin]
        print yaml.dump(yamlmirrors)

    if opts.edit:
        sysconf.assertWritable()

        fd, name = tempfile.mkstemp(".txt")
        file = os.fdopen(fd, "w")
        print >> file, EDITEXAMPLE
        origins = sysconf.keys("mirrors")
        origins.sort()
        for origin in origins:
            print >> file, origin
            mirrors = sysconf.get(("mirrors", origin))
            for mirror in mirrors:
                print >> file, "   ", mirror
            print >> file
        file.close()
        editor = os.environ.get("EDITOR", "vi")
        olddigest = getFileDigest(name)
        while True:
            os.system("%s %s" % (editor, name))
            newdigest = getFileDigest(name)
            if newdigest == olddigest:
                break
            try:
                lst = read_mirrors(ctrl, name)
            except Error, e:
                iface.error(unicode(e))
                if not iface.askYesNo(_("Continue?"), True):
                    break
                else:
                    continue
            newmirrors = {}
            for i in range(0, len(lst), 2):
                origin, mirror = lst[i:i + 2]
                newmirrors.setdefault(origin, []).append(mirror)
            sysconf.set("mirrors", newmirrors)
            break
        os.unlink(name)
Example #16
    if opts.edit:
        sysconf.assertWritable()
        
        fd, name = tempfile.mkstemp(".ini")
        file = os.fdopen(fd, "w")
        aliases = sysconf.keys("channels")
        aliases.sort()
        for alias in aliases:
            channel = sysconf.get(("channels", alias))
            desc = createChannelDescription(alias, parseChannelData(channel))
            print >>file, desc
            print >>file
        file.close()
        editor = os.environ.get("EDITOR", "vi")
        olddigest = getFileDigest(name)
        while True:
            os.system("%s %s" % (editor, name))
            newdigest = getFileDigest(name)
            if newdigest == olddigest:
                break
            file = open(name)
            data = file.read()
            file.close()
            try:
                newchannels = parseChannelsDescription(data)
            except Error, e:
                iface.error(unicode(e))
                if not iface.askYesNo(_("Continue?"), True):
                    break
                else:
                    continue
Example #20
class APTDEBChannel(PackageChannel):

    # It's important for the defaults to be here so that old pickled
    # instances which don't have these attributes still work fine.
    _fingerprint = None
    _keyring = None
    _trustdb = None
    _arch = None

    def __init__(self, baseurl, distro, comps, fingerprint, keyring, trustdb,
                 *args):
        super(APTDEBChannel, self).__init__(*args)

        distro = distro.lstrip('/')
        self._baseurl = baseurl
        self._distro = distro
        self._comps = comps
        if fingerprint:
            self._fingerprint = "".join(fingerprint.split())
        if keyring:
            self._keyring = keyring
        if trustdb:
            self._trustdb = trustdb

    def _getURL(self, filename="", component=None, subpath=False):
        if self._arch is None:
            self._arch = getArchitecture()
        if subpath:
            distrourl = ""
        elif not self._comps:
            distrourl = posixpath.join(self._baseurl, self._distro)
        else:
            distrourl = posixpath.join(self._baseurl, "dists", self._distro)
        if component:
            return posixpath.join(distrourl, component, "binary-" + self._arch,
                                  filename)
        else:
            return posixpath.join(distrourl, filename)
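
    # Illustration (hypothetical values): with baseurl
    # "http://deb.example.org/debian", distro "stable", comps set and
    # arch "i386", _getURL("Packages", "main") yields
    # "http://deb.example.org/debian/dists/stable/main/binary-i386/Packages",
    # while subpath=True yields just "main/binary-i386/Packages", the
    # relative form used to look paths up in the Release checksum table.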

    def getCacheCompareURLs(self):
        return [self._getURL("Release")]

    def getFetchSteps(self):
        if self._comps:
            # Packages*components + Release + Release.gpg
            return len(self._comps) + 2
            # Component Release files are not being used, otherwise it'd be:
            # (Packages+Release)*components + Release + Release.gpg
            #return len(self._comps)*2+2
        else:
            # Packages + Release + Release.gpg
            return 3

    def _checkRelease(self, release_item, release_gpg_item):
        is_secure_channel = bool(self._fingerprint or self._keyring)
        need_release = bool(is_secure_channel or self._comps)
        release_failed = release_item.getFailedReason()

        if need_release and release_failed:
            raise Error, _("Download of Release failed for channel '%s': %s") \
                         % (self, release_failed)

        if is_secure_channel:
            release_gpg_failed = release_gpg_item.getFailedReason()
            if release_gpg_failed:
                raise Error, \
                      _("Download of Release.gpg failed for secure "
                        "channel '%s': %s") % (self, release_gpg_failed)

            arguments = [
                "gpg", "--batch", "--no-secmem-warning", "--status-fd", "1"
            ]

            if self._keyring:
                arguments.append("--no-default-keyring")
                if os.path.isfile(self._keyring):
                    arguments.extend(["--keyring", self._keyring])
                elif os.path.isdir(self._keyring):
                    for keyring in next(os.walk(self._keyring))[2]:
                        keyring = os.path.join(self._keyring, keyring)
                        arguments.extend(["--keyring", keyring])

            if self._trustdb:
                arguments.extend(["--trustdb-name", self._trustdb])

            arguments.extend([
                "--verify",
                release_gpg_item.getTargetPath(),
                release_item.getTargetPath()
            ])

            command = " ".join(arguments)
            status, output = commands.getstatusoutput(command)

            badsig = False
            goodsig = False
            validsig = None
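            # gpg ran with "--status-fd 1", so machine-readable
            # "[GNUPG:] <keyword> ..." status lines are mixed into its
            # stdout; scan them for GOODSIG/BADSIG and the VALIDSIG
            # fingerprint.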
            for line in output.splitlines():
                if line.startswith("[GNUPG:]"):
                    tokens = line[8:].split()
                    first = tokens[0]
                    if first == "VALIDSIG":
                        validsig = tokens[1]
                    elif first == "GOODSIG":
                        goodsig = True
                    elif first == "BADSIG":
                        badsig = True
            if badsig:
                raise Error, _("Channel '%s' has bad signature") % self
            if (not goodsig
                    or (self._fingerprint and validsig != self._fingerprint)):
                raise Error, _("Channel '%s' signed with unknown key") % self

    def _parseRelease(self, release_item):
        checksum = {}
        insidemd5sum = False
        insidesha1 = False
        insidesha256 = False
        for line in open(release_item.getTargetPath()):
            if not insidemd5sum:
                if line.startswith("MD5Sum:"):
                    insidemd5sum = True
            elif not line.startswith(" "):
                insidemd5sum = False
            elif insidemd5sum:
                try:
                    md5, size, path = line.split()
                except ValueError:
                    pass
                else:
                    if path not in checksum:
                        checksum[path] = {}
                    checksum[path]["md5"] = md5
                    checksum[path]["size"] = int(size)
            if not insidesha1:
                if line.startswith("SHA1:"):
                    insidesha1 = True
            elif not line.startswith(" "):
                insidesha1 = False
            elif insidesha1:
                try:
                    sha1, size, path = line.split()
                except ValueError:
                    pass
                else:
                    if path not in checksum:
                        checksum[path] = {}
                    checksum[path]["sha1"] = sha1
                    checksum[path]["size"] = int(size)
            if not insidesha256:
                if line.startswith("SHA256:"):
                    insidesha256 = True
            elif not line.startswith(" "):
                insidesha256 = False
            elif insidesha256:
                try:
                    sha256, size, path = line.split()
                except ValueError:
                    pass
                else:
                    if path not in checksum:
                        checksum[path] = {}
                    checksum[path]["sha256"] = sha256
                    checksum[path]["size"] = int(size)
        return checksum
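
    # For example (hypothetical values), a Release excerpt such as:
    #   MD5Sum:
    #    d41d8cd9...  1234 main/binary-i386/Packages.gz
    #   SHA256:
    #    e3b0c442...  1234 main/binary-i386/Packages.gz
    # is folded into:
    #   {"main/binary-i386/Packages.gz":
    #       {"md5": "d41d8cd9...", "sha256": "e3b0c442...", "size": 1234}}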

    def _enqueuePackages(self, fetcher, checksum=None, component=None):
        info = {}
        url = self._getURL("Packages", component)
        subpath = self._getURL("Packages", component, subpath=True)
        if checksum is not None:
            if subpath + ".lzma" in checksum:
                compressed_subpath = subpath + ".lzma"
                url += ".lzma"
            elif subpath + ".bz2" in checksum:
                compressed_subpath = subpath + ".bz2"
                url += ".bz2"
            elif subpath + ".gz" in checksum:
                compressed_subpath = subpath + ".gz"
                url += ".gz"
            elif subpath in checksum:
                compressed_subpath = None
            else:
                return None
            if compressed_subpath:
                info["uncomp"] = True
                info["md5"] = checksum[compressed_subpath].get("md5", None)
                info["sha1"] = checksum[compressed_subpath].get("sha1", None)
                info["sha256"] = checksum[compressed_subpath].get(
                    "sha256", None)
                info["size"] = checksum[compressed_subpath]["size"]
                if subpath in checksum:
                    info["uncomp_md5"] = checksum[subpath].get("md5", None)
                    info["uncomp_sha1"] = checksum[subpath].get("sha1", None)
                    info["uncomp_sha256"] = checksum[subpath].get(
                        "sha256", None)
                    info["uncomp_size"] = checksum[subpath]["size"]
            else:
                info["md5"] = checksum[subpath].get("md5", None)
                info["sha1"] = checksum[subpath].get("sha1", None)
                info["sha256"] = checksum[subpath].get("sha256", None)
                info["size"] = checksum[subpath]["size"]
        else:
            # Default to Packages.gz when we can't find out.
            info["uncomp"] = True
            url += ".gz"
        return fetcher.enqueue(url, **info)
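
    # The if/elif chain above prefers the smallest archive listed in the
    # Release file: .lzma, then .bz2, then .gz, then the plain file.  A
    # data-driven equivalent for the compressed cases (sketch, same
    # behavior) would be:
    #   for ext in (".lzma", ".bz2", ".gz"):
    #       if subpath + ext in checksum:
    #           compressed_subpath, url = subpath + ext, url + ext
    #           break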

    def fetch(self, fetcher, progress):

        fetcher.reset()

        # Fetch release file
        release_item = fetcher.enqueue(self._getURL("Release"))
        release_gpg_item = fetcher.enqueue(self._getURL("Release.gpg"))
        fetcher.run(progress=progress)

        try:
            self._checkRelease(release_item, release_gpg_item)
        except Error, e:
            progress.add(self.getFetchSteps() - 2)
            progress.show()
            if fetcher.getCaching() is NEVER:
                raise
            else:
                return False

        if not release_item.getFailedReason():
            digest = getFileDigest(release_item.getTargetPath())
            if digest == self._digest:
                progress.add(self.getFetchSteps() - 2)
                progress.show()
                return True
            self.removeLoaders()
            checksum = self._parseRelease(release_item)
        else:
            digest = None
            checksum = None

        fetcher.reset()

        if not self._comps:
            packages_items = [self._enqueuePackages(fetcher, checksum)]
        else:
            packages_items = []
            for component in self._comps:
                item = self._enqueuePackages(fetcher, checksum, component)
                if item:
                    packages_items.append(item)
                else:
                    iface.warning(
                        _("Component '%s' is not in Release file "
                          "for channel '%s'") % (component, self))

        fetcher.run(progress=progress)

        errorlines = []
        for item in packages_items:
            if item.getStatus() == SUCCEEDED:
                localpath = item.getTargetPath()
                loader = DebTagFileLoader(localpath, self._baseurl)
                loader.setChannel(self)
                self._loaders.append(loader)
            else:
                errorlines.append(u"%s: %s" %
                                  (item.getURL(), item.getFailedReason()))

        if errorlines:
            if fetcher.getCaching() is NEVER:
                errorlines.insert(
                    0,
                    _("Failed acquiring information for '%s':") % self)
                raise Error, "\n".join(errorlines)
            return False

        if digest:
            self._digest = digest

        return True
Example #21
    def fetch(self, fetcher, progress):

        fetcher.reset()

        # Fetch release file
        item = fetcher.enqueue(self._getURL("Release"))
        gpgitem = fetcher.enqueue(self._getURL("Release.gpg"))
        fetcher.run(progress=progress)
        failed = item.getFailedReason()
        if failed:
            progress.add(self.getFetchSteps()-2)
            progress.show()
            if fetcher.getCaching() is NEVER:
                lines = [_("Failed acquiring information for '%s':") % self,
                         u"%s: %s" % (item.getURL(), failed)]
                raise Error, "\n".join(lines)
            return False

        digest = getFileDigest(item.getTargetPath())
        if digest == self._digest:
            progress.add(self.getFetchSteps()-2)
            progress.show()
            return True
        self.removeLoaders()

        # Parse release file
        md5sum = {}
        insidemd5sum = False
        for line in open(item.getTargetPath()):
            if not insidemd5sum:
                if line.startswith("MD5Sum:"):
                    insidemd5sum = True
            elif not line.startswith(" "):
                insidemd5sum = False
            else:
                try:
                    md5, size, path = line.split()
                except ValueError:
                    pass
                else:
                    md5sum[path] = (md5, int(size))

        if self._fingerprint:
            try:
                failed = gpgitem.getFailedReason()
                if failed:
                    raise Error, _("Channel '%s' has fingerprint but download "
                                   "of Release.gpg failed: %s")%(self, failed)

                status, output = commands.getstatusoutput(
                    "gpg --batch --no-secmem-warning --status-fd 1 "
                    "--verify %s %s" % (gpgitem.getTargetPath(),
                                        item.getTargetPath()))

                badsig = False
                goodsig = False
                validsig = None
                for line in output.splitlines():
                    if line.startswith("[GNUPG:]"):
                        tokens = line[8:].split()
                        first = tokens[0]
                        if first == "VALIDSIG":
                            validsig = tokens[1]
                        elif first == "GOODSIG":
                            goodsig = True
                        elif first == "BADSIG":
                            badsig = True
                if badsig:
                    raise Error, _("Channel '%s' has bad signature") % self
                if not goodsig or validsig != self._fingerprint:
                    raise Error, _("Channel '%s' signed with unknown key") \
                                 % self
            except Error, e:
                progress.add(self.getFetchSteps()-2)
                progress.show()
                if fetcher.getCaching() is NEVER:
                    raise
                else:
                    return False
Example #22
    def fetch(self, fetcher, progress):

        fetcher.reset()
        repomd = posixpath.join(self._baseurl, "repodata/repomd.xml")
        item = fetcher.enqueue(repomd)
        fetcher.run(progress=progress)

        if item.getStatus() is FAILED:
            progress.add(self.getFetchSteps() - 1)
            if fetcher.getCaching() is NEVER:
                lines = [
                    _("Failed acquiring release file for '%s':") % self,
                    u"%s: %s" % (item.getURL(), item.getFailedReason())
                ]
                raise Error, "\n".join(lines)
            return False

        digest = getFileDigest(item.getTargetPath())
        if digest == self._digest:
            progress.add(1)
            return True
        self.removeLoaders()

        info = {}
        root = ElementTree.parse(item.getTargetPath()).getroot()
        for node in root.getchildren():
            if node.tag != DATA:
                continue
            type = node.get("type")
            info[type] = {}
            for subnode in node.getchildren():
                if subnode.tag == LOCATION:
                    info[type]["url"] = \
                        posixpath.join(self._baseurl, subnode.get("href"))
                if subnode.tag == CHECKSUM:
                    info[type][subnode.get("type")] = subnode.text
                if subnode.tag == OPENCHECKSUM:
                    info[type]["uncomp_"+subnode.get("type")] = \
                        subnode.text

        if "primary" not in info:
            raise Error, _("Primary information not found in repository "
                           "metadata for '%s'") % self

        fetcher.reset()
        item = fetcher.enqueue(info["primary"]["url"],
                               md5=info["primary"].get("md5"),
                               uncomp_md5=info["primary"].get("uncomp_md5"),
                               sha=info["primary"].get("sha"),
                               uncomp_sha=info["primary"].get("uncomp_sha"),
                               uncomp=True)
        flitem = fetcher.enqueue(
            info["filelists"]["url"],
            md5=info["filelists"].get("md5"),
            uncomp_md5=info["filelists"].get("uncomp_md5"),
            sha=info["filelists"].get("sha"),
            uncomp_sha=info["filelists"].get("uncomp_sha"),
            uncomp=True)
        fetcher.run(progress=progress)

        if item.getStatus() == SUCCEEDED and flitem.getStatus() == SUCCEEDED:
            localpath = item.getTargetPath()
            filelistspath = flitem.getTargetPath()
            loader = RPMMetaDataLoader(localpath, filelistspath, self._baseurl)
            loader.setChannel(self)
            self._loaders.append(loader)
        elif (item.getStatus() == SUCCEEDED and flitem.getStatus() == FAILED
              and fetcher.getCaching() is ALWAYS):
            iface.warning(
                _("You must fetch channel information to "
                  "acquire needed filelists."))
            return False
        elif fetcher.getCaching() is NEVER:
            lines = [
                _("Failed acquiring information for '%s':") % self,
                u"%s: %s" % (item.getURL(), item.getFailedReason())
            ]
            raise Error, "\n".join(lines)
        else:
            return False

        self._digest = digest

        return True
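
# A minimal repomd.xml fragment of the shape the parse loop above walks
# (hypothetical values; real files are XML-namespaced, which is why
# DATA, LOCATION, CHECKSUM and OPENCHECKSUM hold qualified tag names):
#   <repomd>
#     <data type="primary">
#       <location href="repodata/primary.xml.gz"/>
#       <checksum type="sha">...</checksum>
#       <open-checksum type="sha">...</open-checksum>
#     </data>
#     <data type="filelists">...</data>
#   </repomd>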
Example #23
    def fetch(self, fetcher, progress):

        fetcher.reset()

        if self._compressed:
            PACKAGES_TXT = "PACKAGES.TXT.gz"
            CHECKSUMS_md5 = "CHECKSUMS.md5.gz"
        else:
            PACKAGES_TXT = "PACKAGES.TXT"
            CHECKSUMS_md5 = "CHECKSUMS.md5"

        # Fetch packages file
        url = posixpath.join(self._baseurl, PACKAGES_TXT)
        item = fetcher.enqueue(url, uncomp=self._compressed)
        fetcher.run(progress=progress)
        if item.getStatus() == SUCCEEDED:
            localpath = item.getTargetPath()
            digest = getFileDigest(localpath)
            if digest == self._digest:
                return True
            fetcher.reset()
            url = posixpath.join(self._baseurl, CHECKSUMS_md5)
            item = fetcher.enqueue(url, uncomp=self._compressed)
            if self._fingerprint:
                gpgurl = posixpath.join(self._baseurl, CHECKSUMS_md5 + ".asc")
                gpgitem = fetcher.enqueue(gpgurl)
            fetcher.run(progress=progress)
            if item.getStatus() == SUCCEEDED:
                checksumpath = item.getTargetPath()
            else:
                checksumpath = None
            if self._fingerprint:
                if gpgitem.getStatus() is SUCCEEDED:
                    try:
                        status, output = commands.getstatusoutput(
                            "gpg --batch --no-secmem-warning --status-fd 1 "
                            "--verify %s %s" %
                            (gpgitem.getTargetPath(), item.getTargetPath()))

                        badsig = False
                        goodsig = False
                        validsig = None
                        for line in output.splitlines():
                            if line.startswith("[GNUPG:]"):
                                tokens = line[8:].split()
                                first = tokens[0]
                                if first == "VALIDSIG":
                                    validsig = tokens[1]
                                elif first == "GOODSIG":
                                    goodsig = True
                                elif first == "BADSIG":
                                    badsig = True
                        if badsig:
                            raise Error, _(
                                "Channel '%s' has bad signature") % self
                        if not goodsig or validsig != self._fingerprint:
                            raise Error, _("Channel '%s' signed with unknown key") \
                                         % self
                    except Error, e:
                        progress.add(self.getFetchSteps() - 2)
                        progress.show()
                        if fetcher.getCaching() is NEVER:
                            raise
                        else:
                            return False
            self.removeLoaders()
            loader = SlackSiteLoader(localpath, checksumpath, self._baseurl)
            loader.setChannel(self)
            self._loaders.append(loader)
        elif fetcher.getCaching() is NEVER:
            lines = [_("Failed acquiring information for '%s':") % self,
                     u"%s: %s" % (item.getURL(), item.getFailedReason())]
            raise Error, "\n".join(lines)
        else:
            return False

        self._digest = digest

        return True
Example #24
    def fetch(self, fetcher, progress):

        # Fetch the media information file; its timestamp tells
        # whether the repository has changed.
        fetchitem = posixpath.join(self._baseurl, "media.1/media")
        fetched = self.__fetchFile(fetchitem, fetcher, progress)
        if fetched.getStatus() == FAILED:
            return False

        digest = getFileDigest(fetched.getTargetPath())
        #if digest == self._digest and getattr(self, "force-yast", False):
        if digest == self._digest:
            return True

        # Find location of description files
        fetchitem = posixpath.join(self._baseurl, "content")
        fetched = self.__fetchFile(fetchitem, fetcher, progress)
        if fetched.getStatus() == FAILED:
            return False

        descrdir = "suse/setup/descr"
        datadir = "RPMS"
        uncompress = self._compressed
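        # The "content" index is made of plain "KEY value" lines, e.g.
        # (hypothetical excerpt):
        #   DESCRDIR suse/setup/descr
        #   DATADIR  suse
        #   META     SHA1 <checksum> packages.gz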
        for line in open(fetched.getTargetPath()):
            line = line.strip()
            try:
                key, rest = line.split(None, 1)
            except ValueError:
                continue

            if key == "DESCRDIR":
                descrdir = rest
            elif key == "DATADIR":
                datadir = rest
            elif key == "META":
                # Autodetect compressed/uncompressed SuSEtags metadata.
                if rest.endswith("packages"):
                    uncompress = False
                elif rest.endswith("packages.gz"):
                    uncompress = True

        # Fetch package information (req, dep, prov, etc)
        fetchitem = posixpath.join(self._baseurl, "%s/packages" % descrdir)
        if uncompress:
            fetchitem += ".gz"
        fetched = self.__fetchFile(fetchitem, fetcher, progress, uncompress)
        if fetched.getStatus() == FAILED:
            return False

        self.removeLoaders()

        pkginfofile = fetched.getTargetPath()
        header = open(pkginfofile).readline().strip()
        if header == "=Ver: 2.0":
            fetchitem = posixpath.join(self._baseurl,
                                       "%s/packages.en" % descrdir)
            if uncompress:
                fetchitem += ".gz"

            fetched = self.__fetchFile(fetchitem, fetcher, progress,
                                       uncompress)

            if (fetched.getStatus() == FAILED
                    or open(fetched.getTargetPath()).readline().strip() !=
                    "=Ver: 2.0"):
                raise Error, "YaST2 package descriptions not loaded."
            else:
                pkgdescfile = fetched.getTargetPath()
                loader = YaST2Loader(self._baseurl, datadir, pkginfofile,
                                     pkgdescfile)

            loader.setChannel(self)
            self._loaders.append(loader)
        else:
            raise Error, _("Invalid package file header (%s)" % header)

        self._digest = digest

        return True