def refresh_arch_repo(repo):
    """ Refresh all mirrors of an arch linux repo """
    db_name = '{0!s}.db'.format(repo.repo_id)
    for mirror in repo.mirror_set.filter(refresh=True):
        response = find_mirror_url(mirror.url, [db_name])
        mirror.last_access_ok = response_is_valid(response)
        # Unreachable mirror: record the failure and move on.
        if not mirror.last_access_ok:
            mirror.fail()
            mirror.save()
            continue
        mirror_url = response.url
        info_message.send(sender=None,
                          text='Found arch repo - {0!s}'.format(mirror_url))
        data = download_url(response, 'Downloading repo info:')
        if data is None:
            mirror.fail()
            return
        checksum = get_sha1(data)
        if mirror.file_checksum == checksum:
            # Same metadata as last time, nothing to re-parse.
            warning_message.send(sender=None,
                                 text='Mirror checksum has not changed, '
                                      'not refreshing package metadata')
        else:
            packages = extract_arch_packages(data)
            mirror.last_access_ok = True
            mirror.timestamp = datetime.now()
            update_mirror_packages(mirror, packages)
            mirror.file_checksum = checksum
            packages.clear()
        mirror.save()
def mirrorlists_check(repo):
    """ Check if any of the mirrors are actually mirrorlists """
    for mirror in repo.mirror_set.all():
        found_urls = mirrorlist_check(mirror.url)
        if not found_urls:
            continue
        # The mirror url turned out to be a list of mirrors; flag it and
        # create individual Mirror objects for each url it contains.
        mirror.mirrorlist = True
        mirror.last_access_ok = True
        mirror.save()
        info_message.send(sender=None,
                          text='Found mirrorlist - {0!s}'.format(mirror.url))
        for url in found_urls:
            url = url.decode('ascii')
            url = url.replace('$ARCH', repo.arch.name)
            url = url.replace('$basearch', repo.arch.name)
            if hasattr(settings, 'MAX_MIRRORS') and \
                    isinstance(settings.MAX_MIRRORS, int):
                max_mirrors = settings.MAX_MIRRORS
                # only add X mirrors, where X = max_mirrors
                existing = mirror.repo.mirror_set.filter(
                    Q(mirrorlist=False, refresh=True)).count()
                if existing >= max_mirrors:
                    text = '{0!s} mirrors already '.format(max_mirrors)
                    text += 'exist, not adding {0!s}'.format(url)
                    warning_message.send(sender=None, text=text)
                    continue
            from patchman.repos.models import Mirror
            new_mirror, created = Mirror.objects.get_or_create(repo=repo,
                                                               url=url)
            if created:
                info_message.send(sender=None,
                                  text='Added mirror - {0!s}'.format(url))
def refresh_yum_repo(mirror, data, mirror_url, ts):
    """ Refresh package metadata for a yum-style rpm mirror
        and add the packages to the mirror

        mirror: the Mirror object being refreshed
        data: previously-downloaded repomd data used to find primary.xml
        mirror_url: base url of the mirror
        ts: timestamp shared by all mirrors refreshed in this pass
    """
    primary_url, checksum, checksum_type = get_primary_url(mirror_url, data)

    if not primary_url:
        mirror.fail()
        return

    res = get_url(primary_url)
    mirror.last_access_ok = response_is_valid(res)
    if not mirror.last_access_ok:
        mirror.fail()
        return

    data = download_url(res, 'Downloading repo info (2/2):')
    if data is None:
        mirror.fail()
        return

    sha = get_sha(checksum_type, data)
    if sha is None:
        mirror.fail()
        return

    if not checksum_is_valid(sha, checksum, mirror):
        mirror.fail()
        return

    if mirror.file_checksum == checksum:
        text = 'Mirror checksum has not changed, '
        text += 'not refreshing package metadata'
        warning_message.send(sender=None, text=text)
        return

    mirror.file_checksum = checksum

    # BUG FIX: the package extraction used to live in the else branch of the
    # limit check *inside* the hasattr(settings, 'MAX_MIRRORS') guard, so
    # repos on installations without MAX_MIRRORS configured never had their
    # package metadata refreshed. Read the setting defensively instead and
    # only short-circuit when a valid limit has been reached.
    max_mirrors = getattr(settings, 'MAX_MIRRORS', None)
    if isinstance(max_mirrors, int):
        # only refresh X mirrors, where X = max_mirrors
        checksum_q = Q(mirrorlist=False, refresh=True, timestamp=ts,
                       file_checksum=checksum)
        have_checksum = mirror.repo.mirror_set.filter(checksum_q).count()
        if have_checksum >= max_mirrors:
            text = '{0!s} mirrors already have this '.format(max_mirrors)
            text += 'checksum, ignoring refresh to save time'
            info_message.send(sender=None, text=text)
            return

    packages = extract_yum_packages(data, primary_url)
    if packages:
        update_mirror_packages(mirror, packages)
def refresh_rpm_repo(repo):
    """ Refresh an rpm repo.
        Checks if the repo url is a mirrorlist, and extracts mirrors if so.
        If not, checks a number of common rpm repo formats to determine
        which type of repo it is, and to determine the mirror urls.
    """
    formats = [
        'repodata/repomd.xml.bz2',
        'repodata/repomd.xml.gz',
        'repodata/repomd.xml',
        'suse/repodata/repomd.xml.bz2',
        'suse/repodata/repomd.xml.gz',
        'suse/repodata/repomd.xml',
        'content',
    ]
    # lzma may be unavailable on older pythons; only offer .xz then.
    if lzma is not None:
        formats.insert(0, 'repodata/repomd.xml.xz')
        formats.insert(4, 'suse/repodata/repomd.xml.xz')

    check_for_mirrorlists(repo)
    check_for_metalinks(repo)

    # BUG FIX: max_mirrors was only assigned when settings.MAX_MIRRORS
    # existed and was an int, but was read unconditionally in the loop below,
    # raising UnboundLocalError on installations without that setting.
    # Default to None and skip the limit check in that case.
    max_mirrors = getattr(settings, 'MAX_MIRRORS', None)
    if not isinstance(max_mirrors, int):
        max_mirrors = None

    # One shared timestamp so all mirrors refreshed in this pass match.
    ts = datetime.now().replace(microsecond=0)
    enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True)
    for i, mirror in enumerate(enabled_mirrors):
        res = find_mirror_url(mirror.url, formats)
        mirror.last_access_ok = response_is_valid(res)
        if mirror.last_access_ok:
            # only refresh X mirrors, where X = max_mirrors
            if max_mirrors is not None and i >= max_mirrors:
                text = '{0!s} mirrors already refreshed, '.format(max_mirrors)
                text += ' not refreshing {0!s}'.format(mirror.url)
                warning_message.send(sender=None, text=text)
                continue
            data = download_url(res, 'Downloading repo info (1/2):')
            if data is None:
                mirror.fail()
                return
            mirror_url = res.url
            # A url ending in 'content' indicates a yast-style repo.
            if res.url.endswith('content'):
                text = 'Found yast rpm repo - {0!s}'.format(mirror_url)
                info_message.send(sender=None, text=text)
                refresh_yast_repo(mirror, data)
            else:
                text = 'Found yum rpm repo - {0!s}'.format(mirror_url)
                info_message.send(sender=None, text=text)
                refresh_yum_repo(mirror, data, mirror_url, ts)
            mirror.timestamp = ts
        else:
            mirror.fail()
        mirror.save()
def refresh(self, force=False):
    """ Refresh all of a repos mirror metadata,
        force can be set to force a reset of all the mirrors metadata
    """
    if force:
        # Wipe stored checksums so every mirror re-downloads metadata.
        for mirror in self.mirror_set.all():
            mirror.file_checksum = None
            mirror.save()
    if self.auth_required:
        text = 'Repo requires certificate authentication, not updating'
        warning_message.send(sender=None, text=text)
        return
    if self.repotype == Repository.DEB:
        refresh_deb_repo(self)
    elif self.repotype == Repository.RPM:
        refresh_rpm_repo(self)
    else:
        text = 'Error: unknown repo type for repo '
        text += '{0!s}: {1!s}'.format(self.id, self.repotype)
        error_message.send(sender=None, text=text)
def refresh_arch_repo(repo):
    """ Refresh all mirrors of an arch linux repo """
    # BUG FIX: max_mirrors was only assigned when settings.MAX_MIRRORS
    # existed and was an int, but was read unconditionally in the loop below,
    # raising UnboundLocalError on installations without that setting.
    # Default to None and skip the limit check in that case.
    max_mirrors = getattr(settings, 'MAX_MIRRORS', None)
    if not isinstance(max_mirrors, int):
        max_mirrors = None
    fname = '{0!s}/{1!s}.db'.format(repo.arch, repo.repo_id)
    # One shared timestamp so all mirrors refreshed in this pass match.
    ts = datetime.now().replace(microsecond=0)
    for i, mirror in enumerate(repo.mirror_set.filter(refresh=True)):
        res = find_mirror_url(mirror.url, [fname])
        mirror.last_access_ok = response_is_valid(res)
        if mirror.last_access_ok:
            # only refresh X mirrors, where X = max_mirrors
            if max_mirrors is not None and i >= max_mirrors:
                text = '{0!s} mirrors already refreshed, '.format(max_mirrors)
                text += ' not refreshing {0!s}'.format(mirror.url)
                warning_message.send(sender=None, text=text)
                continue
            mirror_url = res.url
            text = 'Found arch repo - {0!s}'.format(mirror_url)
            info_message.send(sender=None, text=text)
            data = download_url(res, 'Downloading repo info:')
            if data is None:
                mirror.fail()
                return
            computed_checksum = get_checksum(data, Checksum.sha1)
            if mirror.file_checksum == computed_checksum:
                text = 'Mirror checksum has not changed, '
                text += 'not refreshing package metadata'
                warning_message.send(sender=None, text=text)
            else:
                packages = extract_arch_packages(data)
                mirror.last_access_ok = True
                mirror.timestamp = ts
                update_mirror_packages(mirror, packages)
                mirror.file_checksum = computed_checksum
                packages.clear()
        else:
            mirror.fail()
        mirror.save()
def add_mirrors_from_urls(repo, mirror_urls):
    """ Creates mirrors from a list of mirror urls """
    for url in mirror_urls:
        # Substitute arch placeholders used by fedora/centos mirrorlists.
        url = url.replace('$ARCH', repo.arch.name)
        url = url.replace('$basearch', repo.arch.name)
        if hasattr(settings, 'MAX_MIRRORS') and \
                isinstance(settings.MAX_MIRRORS, int):
            max_mirrors = settings.MAX_MIRRORS
            # only add X mirrors, where X = max_mirrors
            existing = repo.mirror_set.filter(
                Q(mirrorlist=False, refresh=True)).count()
            if existing >= max_mirrors:
                text = '{0!s} mirrors already '.format(max_mirrors)
                text += 'exist, not adding {0!s}'.format(url)
                warning_message.send(sender=None, text=text)
                continue
        from repos.models import Mirror
        new_mirror, created = Mirror.objects.get_or_create(repo=repo, url=url)
        if created:
            text = 'Added mirror - {0!s}'.format(url)
            info_message.send(sender=None, text=text)
def refresh_deb_repo(repo):
    """ Refresh a debian repo.
        Checks for the Packages* files to determine what the mirror urls
        are and then downloads and extracts packages from those files.
    """
    formats = ['Packages.bz2', 'Packages.gz', 'Packages']
    # lzma may be unavailable on older pythons; only offer .xz then.
    if lzma is not None:
        formats.insert(0, 'Packages.xz')

    # One shared timestamp so all mirrors refreshed in this pass match.
    ts = datetime.now().replace(microsecond=0)
    for mirror in repo.mirror_set.filter(refresh=True):
        response = find_mirror_url(mirror.url, formats)
        mirror.last_access_ok = response_is_valid(response)
        if not mirror.last_access_ok:
            mirror.fail()
            mirror.save()
            continue
        mirror_url = response.url
        info_message.send(sender=None,
                          text='Found deb repo - {0!s}'.format(mirror_url))
        data = download_url(response, 'Downloading repo info:')
        if data is None:
            mirror.fail()
            return
        computed_checksum = get_checksum(data, Checksum.sha1)
        if mirror.file_checksum == computed_checksum:
            # Same metadata as last time, nothing to re-parse.
            warning_message.send(sender=None,
                                 text='Mirror checksum has not changed, '
                                      'not refreshing package metadata')
        else:
            packages = extract_deb_packages(data, mirror_url)
            mirror.last_access_ok = True
            mirror.timestamp = ts
            update_mirror_packages(mirror, packages)
            mirror.file_checksum = computed_checksum
            packages.clear()
        mirror.save()
def add_mirrors_from_urls(mirror, mirror_urls):
    """ Creates mirrors from a list of mirror urls """
    for url in mirror_urls:
        url = url.decode('ascii')
        # Substitute arch placeholders used by fedora/centos mirrorlists.
        url = url.replace('$ARCH', mirror.repo.arch.name)
        url = url.replace('$basearch', mirror.repo.arch.name)
        if hasattr(settings, 'MAX_MIRRORS') and \
                isinstance(settings.MAX_MIRRORS, int):
            max_mirrors = settings.MAX_MIRRORS
            # only add X mirrors, where X = max_mirrors
            existing = mirror.repo.mirror_set.filter(
                Q(mirrorlist=False, refresh=True)).count()
            if existing >= max_mirrors:
                text = '{0!s} mirrors already '.format(max_mirrors)
                text += 'exist, not adding {0!s}'.format(url)
                warning_message.send(sender=None, text=text)
                continue
        from patchman.repos.models import Mirror
        new_mirror, created = Mirror.objects.get_or_create(repo=mirror.repo,
                                                           url=url)
        if created:
            text = 'Added mirror - {0!s}'.format(url)
            info_message.send(sender=None, text=text)
def refresh_deb_repo(repo):
    """ Refresh a debian repo.
        Checks for the Packages* files to determine what the mirror urls
        are and then downloads and extracts packages from those files.
    """
    formats = ['Packages.bz2', 'Packages.gz', 'Packages']
    # lzma may be unavailable on older pythons; only offer .xz then.
    if lzma is not None:
        formats.insert(0, 'Packages.xz')

    for mirror in repo.mirror_set.filter(refresh=True):
        response = find_mirror_url(mirror.url, formats)
        mirror.last_access_ok = response_is_valid(response)
        if not mirror.last_access_ok:
            mirror.fail()
            mirror.save()
            continue
        mirror_url = response.url
        info_message.send(sender=None,
                          text='Found deb repo - {0!s}'.format(mirror_url))
        data = download_url(response, 'Downloading repo info:')
        if data is None:
            mirror.fail()
            return
        checksum = get_sha1(data)
        if mirror.file_checksum == checksum:
            # Same metadata as last time, nothing to re-parse.
            warning_message.send(sender=None,
                                 text='Mirror checksum has not changed, '
                                      'not refreshing package metadata')
        else:
            packages = extract_deb_packages(data, mirror_url)
            mirror.last_access_ok = True
            mirror.timestamp = datetime.now()
            update_mirror_packages(mirror, packages)
            mirror.file_checksum = checksum
            packages.clear()
        mirror.save()