def downloadPkgs(self, package_objects, count):
    """Download the given package objects into the local package cache.

    Packages already present locally (by name in self.localpkgs, or an
    on-disk file that passes self.checkPkg) are skipped.  Download
    progress is reported through a TextProgress sized by *count*.

    :param package_objects: iterable of package objects exposing .name()
    :param count: total number of packages, used to size the progress bar
    :raises CreatorError: propagated from myurlgrab after self.close()
    """
    localpkgs = self.localpkgs.keys()
    progress_obj = TextProgress(count)
    for po in package_objects:
        # Already cached by name: nothing to do.
        if po.name() in localpkgs:
            continue
        filename = self.getLocalPkgPath(po)
        if os.path.exists(filename):
            # File exists on disk; re-download only if the integrity
            # check fails (checkPkg() == 0 means the package is good).
            if self.checkPkg(filename) == 0:
                continue
        dirn = os.path.dirname(filename)
        if not os.path.exists(dirn):
            os.makedirs(dirn)
        url = self.get_url(po)
        proxies = self.get_proxies(po)
        try:
            # NOTE(review): url appears to be an object carrying the
            # actual URL string in its .full attribute — confirm against
            # get_url()'s return type.
            filename = myurlgrab(url.full, filename, proxies, progress_obj)
        except CreatorError:
            # Release our resources before letting the error propagate.
            self.close()
            raise
def downloadPkgs(self, package_objects, count):
    """Fetch the given packages into the local package cache.

    A package is skipped when its name is already known locally or when a
    previously downloaded file passes the integrity check.  Progress for
    the whole batch is shown via a TextProgress of size *count*.

    :param package_objects: iterable of package objects exposing .name()
    :param count: total package count, used to size the progress bar
    :raises CreatorError: re-raised from myurlgrab after self.close()
    """
    cached_names = self.localpkgs.keys()
    progress = TextProgress(count)

    for pkg in package_objects:
        # Skip anything we already have by name.
        if pkg.name() in cached_names:
            continue

        dest = self.getLocalPkgPath(pkg)
        # Skip a file that is already on disk and verifies cleanly
        # (checkPkg() == 0 means intact); otherwise fall through and
        # download it again.
        if os.path.exists(dest) and self.checkPkg(dest) == 0:
            continue

        dest_dir = os.path.dirname(dest)
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        url = self.get_url(pkg)
        proxies = self.get_proxies(pkg)
        try:
            dest = myurlgrab(url, dest, proxies, progress)
        except CreatorError:
            # Clean up before propagating the failure to the caller.
            self.close()
            raise
def _get_uncompressed_data_from_url(url, filename, proxies):
    """Download *url* to *filename* and decompress it in place if needed.

    Recognizes ".gz" and ".bz2" suffixes and shells out to gunzip/bunzip2
    respectively, then returns the path of the (possibly decompressed)
    file with the compression suffix removed.

    :param url: source URL passed straight to myurlgrab
    :param filename: local destination path
    :param proxies: proxy mapping passed to myurlgrab
    :returns: path to the uncompressed local file
    """
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        # BUGFIX: strip only the trailing suffix.  The previous
        # str.replace(suffix, "") removed the FIRST occurrence, which
        # corrupts paths that contain ".gz"/".bz2" elsewhere
        # (e.g. "/cache/foo.gz.d/comps.gz").
        filename = filename[:-len(suffix)]
    return filename
def __attachment_packages(self, pkg_manager):
    """Populate self._attachment with files named by the kickstart.

    Each attachment item is resolved in order of preference:
    1. an absolute path is treated as a glob under the install root;
    2. a package already installed in the rootfs contributes its file
       list (paths joined onto the install root);
    3. otherwise the rpm is downloaded into the cache, extracted into a
       temp dir, and every extracted file is attached.
    """
    if not self.ks:
        return

    self._attachment = []
    for item in kickstart.get_attachment(self.ks):
        if item.startswith('/'):
            # Absolute path: expand it as a glob inside the install root.
            fpaths = os.path.join(self._instroot, item.lstrip('/'))
            for fpath in glob.glob(fpaths):
                self._attachment.append(fpath)
            continue

        filelist = pkg_manager.getFilelist(item)
        if filelist:
            # found rpm in rootfs
            # BUGFIX: reuse the file list fetched above instead of
            # calling pkg_manager.getFilelist(item) a second time.
            for pfile in filelist:
                fpath = os.path.join(self._instroot, pfile.lstrip('/'))
                self._attachment.append(fpath)
            continue

        # try to retrieve rpm file
        (url, proxies) = pkg_manager.package_url(item)
        if not url:
            msger.warning("Can't get url from repo for %s" % item)
            continue
        fpath = os.path.join(self.cachedir, os.path.basename(url))
        if not os.path.exists(fpath):
            # download pkgs
            try:
                fpath = grabber.myurlgrab(url, fpath, proxies, None)
            except CreatorError:
                raise
        tmpdir = self._mkdtemp()
        misc.extract_rpm(fpath, tmpdir)
        # Attach every file the rpm unpacked into the temp dir.
        for (root, dirs, files) in os.walk(tmpdir):
            for fname in files:
                fpath = os.path.join(root, fname)
                self._attachment.append(fpath)
def __attachment_packages(self, pkg_manager):
    """Populate self._attachment with files named by the kickstart.

    Each attachment item is resolved in order of preference:
    1. an absolute path is treated as a glob under the install root;
    2. a package already installed in the rootfs contributes its file
       list (paths joined onto the install root);
    3. otherwise the rpm is downloaded into the cache, extracted into a
       temp dir, and every extracted file is attached.
    """
    if not self.ks:
        return

    self._attachment = []
    for item in kickstart.get_attachment(self.ks):
        if item.startswith('/'):
            # Absolute path: expand it as a glob inside the install root.
            fpaths = os.path.join(self._instroot, item.lstrip('/'))
            for fpath in glob.glob(fpaths):
                self._attachment.append(fpath)
            continue

        filelist = pkg_manager.getFilelist(item)
        if filelist:
            # found rpm in rootfs
            for pfile in pkg_manager.getFilelist(item):
                fpath = os.path.join(self._instroot, pfile.lstrip('/'))
                self._attachment.append(fpath)
            continue

        # try to retrieve rpm file
        (url, proxies) = pkg_manager.package_url(item)
        if not url:
            msger.warning("Can't get url from repo for %s" % item)
            continue
        # NOTE(review): basename() is applied to `url` but the download
        # below uses `url.full` — confirm what package_url() returns and
        # that both usages are consistent.
        fpath = os.path.join(self.cachedir, os.path.basename(url))
        if not os.path.exists(fpath):
            # download pkgs
            try:
                fpath = grabber.myurlgrab(url.full, fpath, proxies, None)
            except CreatorError:
                raise
        tmpdir = self._mkdtemp()
        misc.extract_rpm(fpath, tmpdir)
        # Attach every file the rpm unpacked into the temp dir.
        for (root, dirs, files) in os.walk(tmpdir):
            for fname in files:
                fpath = os.path.join(root, fname)
                self._attachment.append(fpath)
def get_package(pkg, repometadata, arch = None):
    """Find package *pkg* in the given repos and download its rpm.

    Scans each repo's primary metadata (XML or sqlite form), keeps the
    highest version/release seen, then downloads that rpm into the repo's
    package cache unless an intact copy is already there.

    :param pkg: package name to look up
    :param repometadata: list of repo dicts ("primary", "baseurl",
        "cachedir", "name", "proxies")
    :param arch: target architecture; expanded via rpmmisc.archPolicies
    :returns: local path of the downloaded rpm, or None if not found
    """
    ver = ""
    target_repo = None
    # Expand the target arch into the list of acceptable arches;
    # 'noarch' packages are always acceptable.
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
    arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            # Extract the "{namespace}" prefix used by every element tag.
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        # NOTE(review): plain string comparison of
                        # "ver-rel", not rpm version ordering — e.g.
                        # "10-1" < "9-1" lexicographically; confirm this
                        # is acceptable here.
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            # NOTE(review): pkg/arches are interpolated directly into the
            # SQL text; safe only while they come from trusted config.
            if arch:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s" and arch IN ("%s")' % \
                      (pkg, '","'.join(arches))
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s"' % pkg
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()

    if target_repo:
        makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
        url = os.path.join(target_repo["baseurl"], pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        if os.path.exists(filename):
            # Cached copy present: return it if intact, otherwise delete
            # the damaged file and fall through to re-download.
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename

            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)

        pkg = myurlgrab(str(url), filename, target_repo["proxies"])
        return pkg
    else:
        return None
def get_metadata_from_repos(repos, cachedir):
    """Download and index repodata for each configured repo.

    For every repo, fetches repodata/repomd.xml, locates the "primary",
    "patterns" and "comps" entries it advertises, downloads each through
    _get_metadata_from_repo (which verifies the advertised checksum), and
    also tries to fetch the repo signing key.  Repos without a usable
    primary entry are skipped.

    :param repos: list of repo dicts with at least 'name' and 'baseurl'
        (optional 'proxy')
    :param cachedir: directory under which per-repo metadata is cached
    :returns: list of per-repo metadata dicts
    :raises CreatorError: when repomd.xml cannot be parsed
    """
    my_repo_metadata = []
    for repo in repos:
        reponame = repo['name']
        baseurl = repo['baseurl']

        # Explicit per-repo proxy wins over the environment-derived one.
        if 'proxy' in repo:
            proxy = repo['proxy']
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
            # Key the proxy map by the URL scheme of the baseurl.
            proxies = {str(baseurl.split(":")[0]):str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = os.path.join(baseurl, "repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        # Extract the "{namespace}" prefix shared by all element tags.
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        # Record location + open-checksum for the first "patterns" entry.
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # Record the comps (package group) entry, compressed or plain.
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("group_gz", "group"):
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # Record the primary package index (sqlite db or plain XML).
        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # A repo without primary metadata is unusable: skip it entirely.
        if not primary_type:
            continue

        # Download each advertised file, verifying its checksum; items
        # the repo does not advertise are recorded as None.
        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        """ Get repo key """
        # The signing key is optional: a failed fetch is only logged.
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name":reponame,
                                 "baseurl":baseurl,
                                 "repomd":repomd,
                                 "primary":filepaths['primary'],
                                 "cachedir":cachedir,
                                 "proxies":proxies,
                                 "patterns":filepaths['patterns'],
                                 "comps":filepaths['comps'],
                                 "repokey":repokey})

    return my_repo_metadata