def refresh_yum_repo(mirror, data, mirror_url, ts):
    """ Refresh package metadata for a yum-style rpm mirror
        and add the packages to the mirror
    """
    primary_url, checksum, checksum_type = get_primary_url(mirror_url, data)

    if not primary_url:
        mirror.fail()
        return

    res = get_url(primary_url)
    mirror.last_access_ok = response_is_valid(res)

    if not mirror.last_access_ok:
        mirror.fail()
        return

    data = download_url(res, 'Downloading repo info (2/2):')
    if data is None:
        mirror.fail()
        return

    sha = get_sha(checksum_type, data)
    if sha is None:
        mirror.fail()
        return

    if not checksum_is_valid(sha, checksum, mirror):
        mirror.fail()
        return

    if mirror.file_checksum == checksum:
        text = 'Mirror checksum has not changed, '
        text += 'not refreshing package metadata'
        warning_message.send(sender=None, text=text)
        return

    mirror.file_checksum = checksum

    if hasattr(settings, 'MAX_MIRRORS') and \
            isinstance(settings.MAX_MIRRORS, int):
        max_mirrors = settings.MAX_MIRRORS
        # only refresh X mirrors, where X = max_mirrors
        checksum_q = Q(mirrorlist=False, refresh=True, timestamp=ts,
                       file_checksum=checksum)
        have_checksum = mirror.repo.mirror_set.filter(checksum_q).count()
        if have_checksum >= max_mirrors:
            text = '{0!s} mirrors already have this '.format(max_mirrors)
            text += 'checksum, ignoring refresh to save time'
            info_message.send(sender=None, text=text)
        else:
            packages = extract_yum_packages(data, primary_url)
            if packages:
                update_mirror_packages(mirror, packages)
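
# A rough sketch of how refresh_yum_repo might be driven for a single mirror.
# Illustrative comment only: find_mirror_url and download_url are defined in
# this module, but the 'repodata/repomd.xml' path and the 'timestamp' variable
# are hypothetical stand-ins for whatever the real caller supplies.
#
#   res = find_mirror_url(mirror.url, ['repodata/repomd.xml'])
#   if res is not None and res.ok:
#       data = download_url(res, 'Downloading repo info (1/2):')
#       if data is not None:
#           refresh_yum_repo(mirror, data, res.url, timestamp)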

def find_mirror_url(stored_mirror_url, formats):
    """ Find the actual URL of the mirror by trying predefined paths
    """
    for fmt in formats:
        mirror_url = stored_mirror_url
        for f in formats:
            if mirror_url.endswith(f):
                mirror_url = mirror_url[:-len(f)]
        mirror_url = mirror_url.rstrip('/') + '/' + fmt
        debug_message.send(sender=None,
                           text='Checking {0!s}'.format(mirror_url))
        res = get_url(mirror_url)
        if res is not None and res.ok:
            return res
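
# A minimal usage sketch for find_mirror_url, kept as a comment so nothing
# runs at import time. The formats list and URL below are hypothetical
# examples, not values defined by this module:
#
#   formats = ['repodata/repomd.xml', 'suse/repodata/repomd.xml', 'content']
#   res = find_mirror_url('https://mirror.example.org/centos/7/os/x86_64',
#                         formats)
#   if res is not None:
#       print(res.url)  # the first candidate path that answered successfully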

def get_mirrorlist_urls(url):
    """ Check whether a given URL returns a mirrorlist by verifying that the
        response is of type text/plain and contains a list of URLs.
        Return a list of mirror URLs if it is a mirrorlist.
    """
    res = get_url(url)
    if response_is_valid(res):
        if 'content-type' in res.headers and \
                'text/plain' in res.headers['content-type']:
            data = download_url(res, 'Downloading repo info:')
            if data is None:
                return
            mirror_urls = re.findall(b'^http://.*$|^ftp://.*$',
                                     data, re.MULTILINE)
            if mirror_urls:
                return mirror_urls
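
# Illustrative input for get_mirrorlist_urls (hypothetical data, comment
# only). A text/plain mirrorlist is expected to contain one URL per line:
#
#   http://mirror1.example.org/centos/7/os/x86_64/
#   ftp://mirror2.example.org/centos/7/os/x86_64/
#
# For such a response the function returns the matching lines as a list of
# byte strings, since the regular expression is applied to the raw
# downloaded data.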

def get_metalink_urls(url):
    """ Parses a metalink and returns a list of mirrors
    """
    res = get_url(url)
    if response_is_valid(res):
        if 'content-type' in res.headers and \
                res.headers['content-type'] == 'application/metalink+xml':
            data = download_url(res, 'Downloading repo info:')
            if data is None:
                return
            ns = 'http://www.metalinker.org/'
            try:
                context = etree.parse(BytesIO(data), etree.XMLParser())
            except etree.XMLSyntaxError:
                context = etree.parse(BytesIO(extract(data, 'gz')),
                                      etree.XMLParser())
            xpath = "//ns:files/ns:file[@name='repomd.xml']/ns:resources/ns:url[@protocol='https']"  # noqa
            metalink_urls = context.xpath(xpath, namespaces={'ns': ns})
            return [x.text for x in metalink_urls]
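
# Illustrative metalink fragment of the kind the XPath expression above is
# meant to match (hypothetical example, comment only):
#
#   <metalink xmlns="http://www.metalinker.org/">
#     <files>
#       <file name="repomd.xml">
#         <resources>
#           <url protocol="https">https://mirror.example.org/repodata/repomd.xml</url>
#         </resources>
#       </file>
#     </files>
#   </metalink>
#
# For input like this, get_metalink_urls would return
# ['https://mirror.example.org/repodata/repomd.xml'].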

def refresh_yast_repo(mirror, data):
    """ Refresh package metadata for a yast-style rpm mirror
        and add the packages to the mirror
    """
    package_dir = re.findall(b'DESCRDIR *(.*)', data)[0].decode('ascii')
    package_url = '{0!s}/{1!s}/packages.gz'.format(mirror.url, package_dir)
    res = get_url(package_url)
    mirror.last_access_ok = response_is_valid(res)
    if mirror.last_access_ok:
        data = download_url(res, 'Downloading repo info (2/2):')
        if data is None:
            mirror.fail()
            return
        mirror.file_checksum = 'yast'
        packages = extract_yast_packages(data)
        if packages:
            update_mirror_packages(mirror, packages)
    else:
        mirror.fail()
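
# In a yast-style repository, the metadata passed in as 'data' is expected to
# contain a DESCRDIR line naming the package description directory, for
# example (hypothetical content):
#
#   DESCRDIR setup/descr
#
# which would make package_url '<mirror url>/setup/descr/packages.gz'.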

def download_errata():
    """ Download CentOS errata from https://cefs.steve-meier.de/
    """
    res = get_url('https://cefs.steve-meier.de/errata.latest.xml')
    return download_url(res, 'Downloading CentOS Errata:')