def _load_gpg_keys(self, force_update):
    """ Load GPG keys from the config, downloading if necessary.

    :param force_update: Ignore all locally cached and downloaded
                         data and fetch the metadata anew from the
                         upstream repository.
    :type force_update: bool
    """
    keyfiles = []
    keys = []
    for source in self.sources.entries:
        for key in source.gpgkeys:
            localfile = os.path.join(self.keypath,
                                     os.path.basename(key.rstrip("/")))
            if localfile not in keyfiles:
                keyfiles.append(localfile)
            # Fetch the key if it has never been downloaded, or if a
            # forced update was requested and this key has not yet been
            # re-fetched during this run.
            if ((force_update and key not in keys) or
                    not os.path.exists(localfile)):
                self.logger.info("Packages: Downloading and parsing %s" %
                                 key)
                response = urlopen(key)
                open(localfile, 'w').write(response.read())
                keys.append(key)

    # Prune local key files that no longer correspond to any
    # configured source.
    for kfile in glob.glob(os.path.join(self.keypath, "*")):
        if kfile not in keyfiles:
            os.unlink(kfile)
def _load_gpg_keys(self, force_update):
    """ Load GPG keys from the config, downloading if necessary.

    :param force_update: Ignore all locally cached and downloaded
                         data and fetch the metadata anew from the
                         upstream repository.
    :type force_update: bool
    """
    keyfiles = []
    keys = []
    for source in self.sources.entries:
        for key in source.gpgkeys:
            localfile = os.path.join(self.keypath,
                                     os.path.basename(key.rstrip("/")))
            if localfile not in keyfiles:
                keyfiles.append(localfile)
            if ((force_update and key not in keys) or
                    not os.path.exists(localfile)):
                self.logger.info("Packages: Downloading and parsing %s" %
                                 key)
                try:
                    open(localfile, 'w').write(urlopen(key).read())
                    keys.append(key)
                except (URLError, HTTPError):
                    err = sys.exc_info()[1]
                    self.logger.error("Packages: Error downloading %s: %s"
                                      % (key, err))
                except IOError:
                    err = sys.exc_info()[1]
                    self.logger.error("Packages: Error writing %s to %s: "
                                      "%s" % (key, localfile, err))

    for kfile in glob.glob(os.path.join(self.keypath, "*")):
        if kfile not in keyfiles:
            os.unlink(kfile)
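# The except clauses above assume urlopen, URLError, and HTTPError are in
# scope; in the original codebase they presumably arrive via a py2/py3
# compatibility module. A plain-stdlib equivalent (an assumption, not the
# original import) would be:
try:
    from urllib2 import urlopen, URLError, HTTPError   # Python 2
except ImportError:
    from urllib.request import urlopen                 # Python 3
    from urllib.error import URLError, HTTPError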
def _update_pkgdata(self, pkgdata, source_url):
    for section in source_url.sections:
        for arch in self.architectures:
            url = "%s/dists/%s/%s/binary-%s/Packages.gz" % (
                source_url.url, source_url.distribution, section, arch)
            debug("Processing url %s\n" % (url))
            try:
                data = urlopen(url)
                buf = StringIO(''.join(data.readlines()))
                reader = gzip.GzipFile(fileobj=buf)
                # Packages.gz stanzas list 'Package:' before 'Version:',
                # so pkgname is always bound by the time a version line
                # is seen.
                for line in reader.readlines():
                    if line[:8] == 'Package:':
                        pkgname = line.split(' ')[1].strip()
                    elif line[:8] == 'Version:':
                        version = line.split(' ')[1].strip()
                        if pkgname in pkgdata:
                            if arch in pkgdata[pkgname]:
                                # The package is listed twice for the
                                # same architecture; keep the most
                                # recent version.
                                old_version = pkgdata[pkgname][arch]
                                if self._pkg_version_is_older(old_version,
                                                              version):
                                    pkgdata[pkgname][arch] = version
                            else:
                                # The package data exists for another
                                # architecture, but not for this one.
                                # Add it.
                                pkgdata[pkgname][arch] = version
                        else:
                            # First entry for this package
                            pkgdata[pkgname] = {arch: version}
                    else:
                        continue
            except Exception:
                raise Exception("Could not process URL %s\n%s\nPlease "
                                "verify the URL." % (url, sys.exc_info()[1]))
    return pkgdata
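# _pkg_version_is_older() is called above but not defined in this section.
# A minimal sketch of one plausible implementation, assuming python-apt is
# installed (apt_pkg.version_compare is a real python-apt API; the helper
# itself is hypothetical and would be a method in the original class):
import apt_pkg

apt_pkg.init_system()

def _pkg_version_is_older(old_version, new_version):
    # version_compare() returns a negative value when its first argument
    # sorts before (i.e. is older than) its second under Debian rules.
    return apt_pkg.version_compare(old_version, new_version) < 0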
def fetch_url(url):
    if "@" in url:
        mobj = re.match(r"(\w+://)([^:]+):([^@]+)@(.*)$", url)
        if not mobj:
            raise ValueError
        user = mobj.group(2)
        passwd = mobj.group(3)
        url = mobj.group(1) + mobj.group(4)
        auth = HTTPBasicAuthHandler(HTTPPasswordMgrWithDefaultRealm())
        auth.add_password(None, url, user, passwd)
        install_opener(build_opener(auth))
    return urlopen(url).read()
def _load_gpg_keys(self, force_update):
    """ Load gpg keys from the config """
    keyfiles = []
    keys = []
    for source in self.sources:
        for key in source.gpgkeys:
            localfile = os.path.join(self.keypath,
                                     os.path.basename(key.rstrip("/")))
            if localfile not in keyfiles:
                keyfiles.append(localfile)
            if ((force_update and key not in keys) or
                    not os.path.exists(localfile)):
                self.logger.info("Packages: Downloading and parsing %s" %
                                 key)
                response = urlopen(key)
                open(localfile, "w").write(response.read())
                keys.append(key)

    for kfile in glob.glob(os.path.join(self.keypath, "*")):
        if kfile not in keyfiles:
            os.unlink(kfile)
def fetch_url(url):
    """ Return the content of the given URL.

    :param url: The URL to fetch content from.
    :type url: string
    :raises: ValueError - Malformed URL
    :raises: URLError - Failure fetching URL
    :returns: string - the content of the page at the given URL
    """
    if '@' in url:
        mobj = re.match(r'(\w+://)([^:]+):([^@]+)@(.*)$', url)
        if not mobj:
            raise ValueError("Invalid URL")
        user = mobj.group(2)
        passwd = mobj.group(3)
        url = mobj.group(1) + mobj.group(4)
        auth = HTTPBasicAuthHandler(HTTPPasswordMgrWithDefaultRealm())
        auth.add_password(None, url, user, passwd)
        install_opener(build_opener(auth))
    return urlopen(url).read()
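# A minimal usage sketch for fetch_url (the host and credentials below are
# hypothetical). The names its body relies on live in urllib2 on Python 2
# and in urllib.request on Python 3:
import re
try:
    from urllib2 import (urlopen, install_opener, build_opener,
                         HTTPBasicAuthHandler,
                         HTTPPasswordMgrWithDefaultRealm)
except ImportError:
    from urllib.request import (urlopen, install_opener, build_opener,
                                HTTPBasicAuthHandler,
                                HTTPPasswordMgrWithDefaultRealm)

if __name__ == "__main__":
    # Credentials embedded in the URL are stripped out and registered
    # with a basic-auth handler, so the request URL itself never
    # carries the password.
    content = fetch_url("http://user:secret@mirror.example.com/"
                        "dists/stable/Release")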
def _update_pkgdata(self, pkgdata, source_url):
    for section in source_url.sections:
        for arch in self.architectures:
            # Skip architectures this source does not provide, and skip
            # "all" sources for architectures that have their own
            # dedicated URL.
            if source_url.arch != arch and source_url.arch != "all":
                continue
            if source_url.arch == "all" and arch in self.arch_specialurl:
                continue
            url = "%s/dists/%s/%s/binary-%s/Packages.gz" % (
                source_url.url, source_url.distribution, section, arch)
            debug("Processing url %s\n" % (url))
            try:
                data = urlopen(url)
                buf = StringIO(''.join(data.readlines()))
                reader = gzip.GzipFile(fileobj=buf)
                # Packages.gz stanzas list 'Package:' before 'Version:',
                # so pkgname is always bound by the time a version line
                # is seen.
                for line in reader.readlines():
                    if line[:8] == 'Package:':
                        pkgname = line.split(' ')[1].strip()
                    elif line[:8] == 'Version:':
                        version = line.split(' ')[1].strip()
                        if pkgname in pkgdata:
                            if arch in pkgdata[pkgname]:
                                # The package is listed twice for the
                                # same architecture; keep the most
                                # recent version.
                                old_version = pkgdata[pkgname][arch]
                                if self._pkg_version_is_older(old_version,
                                                              version):
                                    pkgdata[pkgname][arch] = version
                            else:
                                # The package data exists for another
                                # architecture, but not for this one.
                                # Add it.
                                pkgdata[pkgname][arch] = version
                        else:
                            # First entry for this package
                            pkgdata[pkgname] = {arch: version}
                    else:
                        continue
            except Exception:
                raise Exception("Could not process URL %s\n%s\nPlease "
                                "verify the URL." % (url, sys.exc_info()[1]))
    # Only return packages whose names match the configured pattern.
    return dict((k, v) for (k, v) in list(pkgdata.items())
                if re.search(self.pattern, k))