def _get_uncompressed_data_from_url(url, filename, proxies):
    """Download *url* to *filename* and decompress it in place if compressed.

    url      -- remote URL to fetch
    filename -- local destination path (may gain/lose a compression suffix)
    proxies  -- proxy dict passed through to myurlgrab(), or None

    Returns the path of the local, uncompressed file.
    """
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        # Strip the suffix from the END of the path only.  The original
        # str.replace(suffix, "") removed the FIRST occurrence anywhere in
        # the path, corrupting names like ".../cache.gz.d/data.gz".
        filename = filename[:-len(suffix)]
    return filename
def get_package(pkg, repometadata, arch = None):
    """Find and download the binary rpm *pkg* from the known repositories.

    pkg          -- package name to look up
    repometadata -- list of repo dicts as produced by get_metadata_from_repos()
    arch         -- target architecture; expanded via rpmmisc.archPolicies
                    (plus 'noarch').  None/empty means no arch filter.

    Returns the local path of the cached or freshly downloaded rpm, or None
    when the package is not found in any repository.
    """
    ver = ""
    target_repo = None
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'],
                                            version.attrib['rel'])
                        # NOTE(review): plain string comparison, NOT rpm EVR
                        # ordering — kept for backward compatibility, but
                        # "1.10" sorts below "1.9" here.
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                            break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            try:
                # Parameterized queries: the original interpolated pkg/arches
                # straight into the SQL string, which breaks on quotes in
                # names and is injection-prone.
                if arch:
                    placeholders = ",".join("?" * len(arches))
                    sql = ('select version, release, location_href '
                           'from packages where name = ? and arch IN (%s)'
                           % placeholders)
                    rows = con.execute(sql, [pkg] + arches)
                else:
                    sql = ('select version, release, location_href '
                           'from packages where name = ?')
                    rows = con.execute(sql, (pkg,))
                for row in rows:
                    tmpver = "%s-%s" % (row[0], row[1])
                    # Same lexicographic comparison caveat as above.
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                        break
            finally:
                # Close even if the query raises (original leaked here).
                con.close()

    if target_repo:
        makedirs("%s/packages/%s" % (target_repo["cachedir"],
                                     target_repo["name"]))
        url = os.path.join(target_repo["baseurl"], pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"],
                                              target_repo["name"],
                                              os.path.basename(pkgpath)))
        if os.path.exists(filename):
            # Reuse the cached rpm only if it passes an integrity check.
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename
            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)
        pkg = myurlgrab(str(url), filename, target_repo["proxies"])
        return pkg
    else:
        return None
def get_metadata_from_repos(repos, cachedir):
    """Fetch repomd.xml from every repo and download its referenced metadata.

    repos    -- list of repo dicts; each needs 'name' and 'baseurl', and may
                carry an optional 'proxy'
    cachedir -- directory under which per-repo metadata is cached

    Returns a list of dicts, one per repo that has usable primary metadata,
    with keys: name, baseurl, repomd, primary, cachedir, proxies, patterns,
    comps, repokey.
    """
    my_repo_metadata = []
    for repo in repos:
        reponame = repo['name']
        baseurl = repo['baseurl']

        # Per-repo proxy wins over the globally configured one.
        if 'proxy' in repo:
            proxy = repo['proxy']
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
            # Key the proxy on the URL scheme ("http"/"https"/...).
            proxies = {str(baseurl.split(":")[0]): str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = os.path.join(baseurl, "repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            # ElementTree's ParseError subclasses SyntaxError.
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        def _scan(key, types):
            # Record location/checksum of the first <data> element whose
            # type is in *types*; return that type (or None if absent).
            # Replaces three copy-pasted scan loops in the original.
            for elm in root.getiterator("%sdata" % ns):
                if elm.attrib["type"] in types:
                    filepaths[key] = elm.find("%slocation" % ns).attrib['href']
                    checksums[key] = elm.find("%sopen-checksum" % ns).text
                    sumtypes[key] = elm.find("%sopen-checksum" % ns).attrib['type']
                    return elm.attrib["type"]
            return None

        _scan('patterns', ("patterns",))
        _scan('comps', ("group_gz", "group"))
        primary_type = _scan('primary', ("primary_db", "primary"))

        # A repo without primary metadata is unusable -- skip it entirely.
        if not primary_type:
            continue

        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
            if not filepaths[item]:
                # Absent or empty href: leave as-is (None/"") and move on.
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        # Fetch the repo signing key; best effort, not all repos ship one.
        # (The original used a bare string literal here, which is a no-op
        # statement, not a comment.)
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            repokey = None
            msger.debug("\ncan't get %s/%s" %
                        (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name": reponame,
                                 "baseurl": baseurl,
                                 "repomd": repomd,
                                 "primary": filepaths['primary'],
                                 "cachedir": cachedir,
                                 "proxies": proxies,
                                 "patterns": filepaths['patterns'],
                                 "comps": filepaths['comps'],
                                 "repokey": repokey})

    return my_repo_metadata