def checksum_is_valid(mirror, checksum, checksum_type, data):
    """ Check the checksum of the data, returns True if checksum is valid,
        or False if it is invalid or if it has not changed.
    """
    if checksum_type == 'sha':
        sha = get_sha1(data)
    elif checksum_type == 'sha256':
        sha = get_sha256(data)
    else:
        text = 'Unknown checksum type: %s\n' % checksum_type
        error_message.send(sender=None, text=text)
        # bail out here so sha is never used uninitialised below
        return False
    if sha != checksum:
        text = '%s checksum failed for mirror %s' % (checksum_type, mirror.id)
        text += ', not refreshing package metadata\n'
        error_message.send(sender=None, text=text)
        mirror.last_access_ok = False
        return False
    elif mirror.file_checksum == sha:
        text = 'Mirror checksum has not changed, '
        text += 'not refreshing package metadata\n'
        info_message.send(sender=None, text=text)
        return False
    return True
def update_yum_repo(mirror, data, repo_url):
    """ Update package metadata from yum-style rpm repo
        Returns a list of packages on success, or None if there are no
        packages or access fails
    """
    primary_url, checksum, checksum_type = get_primary_url(repo_url, data)

    if not primary_url:
        mirror.fail()
        return

    res = get_url(primary_url)
    mirror.last_access_ok = check_response(res)

    if mirror.last_access_ok:
        data = download_url(res, 'Downloading repo info (2/2):')
        if checksum_type == 'sha':
            sha = get_sha1(data)
        elif checksum_type == 'sha256':
            sha = get_sha256(data)
        else:
            error_message.send(sender=None,
                               text='Unknown checksum type: %s\n'
                               % checksum_type)
            # bail out here so sha is never used uninitialised below
            return
        if sha != checksum:
            error_message.send(sender=None,
                               text='%s checksum failed for mirror %s, '
                                    'not updating package metadata\n'
                               % (checksum_type, mirror.id))
            mirror.last_access_ok = False
        elif mirror.file_checksum == sha:
            info_message.send(sender=None,
                              text='Mirror checksum has not changed, '
                                   'not updating package metadata\n')
        else:
            mirror.file_checksum = sha
            return extract_yum_packages(data)
    else:
        mirror.fail()
        return
def extract_deb_packages(data, packages):
    """ Extract package metadata from debian Packages file """
    extracted = extract(data)
    package_re = re.compile('^Package: ', re.M)
    plen = len(package_re.findall(extracted))
    if plen > 0:
        ptext = 'Extracting packages: '
        progress_info_s.send(sender=None, ptext=ptext, plen=plen)
        sio = StringIO(extracted)
        for i, stanza in enumerate(Sources.iter_paragraphs(sio)):
            fullversion = Version(stanza['version'])
            arch = stanza['architecture']
            name = stanza['package']
            epoch = fullversion._BaseVersion__epoch
            if epoch is None:
                epoch = ''
            version = fullversion._BaseVersion__upstream_version
            release = fullversion._BaseVersion__debian_revision
            if release is None:
                release = ''
            progress_update_s.send(sender=None, index=i + 1)
            package = PackageString(name=name,
                                    epoch=epoch,
                                    version=version,
                                    release=release,
                                    arch=arch,
                                    packagetype='D')
            packages.add(package)
    else:
        info_message.send(sender=None, text='No packages found in repo\n')
def process_packages(report, host):
    """ Processes the quoted packages string sent with a report """
    if report.packages:
        old_packages = host.packages.all()
        package_ids = []
        packages = parse_packages(report.packages)
        progress_info_s.send(sender=None,
                             ptext='{0!s} packages'.format(str(host)[0:25]),
                             plen=len(packages))
        for i, pkg_str in enumerate(packages):
            package = process_package(pkg_str, report.protocol)
            if package:
                package_ids.append(package.id)
                try:
                    with transaction.atomic():
                        host.packages.add(package)
                except IntegrityError as e:
                    error_message.send(sender=None, text=e)
                except DatabaseError as e:
                    error_message.send(sender=None, text=e)
            else:
                if pkg_str[0].lower() != 'gpg-pubkey':
                    text = 'No package returned for {0!s}'.format(pkg_str)
                    info_message.send(sender=None, text=text)
            progress_update_s.send(sender=None, index=i + 1)
        removals = old_packages.exclude(pk__in=package_ids)
        for package in removals:
            host.packages.remove(package)
def mirrorlists_check(repo):
    """ Check if any of the mirrors are actually mirrorlists """
    for mirror in repo.mirror_set.all():
        mirror_urls = mirrorlist_check(mirror.url)
        if mirror_urls:
            mirror.mirrorlist = True
            mirror.last_access_ok = True
            mirror.save()
            text = 'Found mirrorlist - {0!s}'.format(mirror.url)
            info_message.send(sender=None, text=text)
            for mirror_url in mirror_urls:
                mirror_url = mirror_url.decode('ascii')
                mirror_url = mirror_url.replace('$ARCH', repo.arch.name)
                mirror_url = mirror_url.replace('$basearch', repo.arch.name)
                if hasattr(settings, 'MAX_MIRRORS') and \
                        isinstance(settings.MAX_MIRRORS, int):
                    max_mirrors = settings.MAX_MIRRORS
                    # only add X mirrors, where X = max_mirrors
                    q = Q(mirrorlist=False, refresh=True)
                    existing = mirror.repo.mirror_set.filter(q).count()
                    if existing >= max_mirrors:
                        text = '{0!s} mirrors already '.format(max_mirrors)
                        text += 'exist, not adding {0!s}'.format(mirror_url)
                        warning_message.send(sender=None, text=text)
                        continue
                from patchman.repos.models import Mirror
                m, c = Mirror.objects.get_or_create(repo=repo,
                                                    url=mirror_url)
                if c:
                    text = 'Added mirror - {0!s}'.format(mirror_url)
                    info_message.send(sender=None, text=text)
def extract_yast_packages(data):
    """ Extract package metadata from yast metadata file """
    extracted = extract(data, 'gz').decode('utf-8')
    pkgs = re.findall('=Pkg: (.*)', extracted)
    plen = len(pkgs)
    packages = set()
    if plen > 0:
        ptext = 'Extracting packages: '
        progress_info_s.send(sender=None, ptext=ptext, plen=plen)
        for i, pkg in enumerate(pkgs):
            progress_update_s.send(sender=None, index=i + 1)
            name, version, release, arch = pkg.split()
            package = PackageString(name=name.lower(),
                                    epoch='',
                                    version=version,
                                    release=release,
                                    arch=arch,
                                    packagetype='R')
            packages.add(package)
    else:
        info_message.send(sender=None, text='No packages found in repo')
    return packages
def refresh_deb_repo(repo):
    """ Refresh a debian repo.
        Checks for the Packages* files to determine what the mirror urls
        are and then downloads and extracts packages from those files.
    """
    formats = ['Packages.bz2', 'Packages.gz', 'Packages']

    for mirror in repo.mirror_set.filter(refresh=True):
        repo_url, res, unused = find_mirror_url(mirror.url, formats)
        mirror.last_access_ok = check_response(res)
        if mirror.last_access_ok:
            text = 'Found deb repo - %s\n' % repo_url
            info_message.send(sender=None, text=text)
            data = download_url(res, 'Downloading repo info:')
            if data is None:
                mirror.fail()
                return
            sha1 = get_sha1(data)
            if mirror.file_checksum == sha1:
                text = 'Mirror checksum has not changed, '
                text += 'not refreshing package metadata\n'
                info_message.send(sender=None, text=text)
            else:
                packages = set()
                extract_deb_packages(data, packages)
                mirror.last_access_ok = True
                mirror.timestamp = datetime.now()
                update_mirror_packages(mirror, packages)
                mirror.file_checksum = sha1
                packages.clear()
        else:
            mirror.fail()
        mirror.save()
def extract_yast_packages(data):
    """ Extract package metadata from yast metadata file """
    extracted = extract(data, 'gz')
    pkgs = re.findall(b'=Pkg: (.*)', extracted)
    plen = len(pkgs)
    packages = set()
    if plen > 0:
        ptext = 'Extracting packages: '
        progress_info_s.send(sender=None, ptext=ptext, plen=plen)
        for i, pkg in enumerate(pkgs):
            progress_update_s.send(sender=None, index=i + 1)
            name, version, release, arch = str(pkg).split()
            package = PackageString(name=name.lower(),
                                    epoch='',
                                    version=version,
                                    release=release,
                                    arch=arch,
                                    packagetype='R')
            packages.add(package)
    else:
        info_message.send(sender=None, text='No packages found in repo')
    return packages
def show(self):
    """ Show info about this mirror """
    text = ' {0!s} : {1!s}\n'.format(self.id, self.url)
    text += ' last updated: '
    text += '{0!s} checksum: {1!s}\n'.format(self.timestamp,
                                             self.file_checksum)
    info_message.send(sender=None, text=text)
def refresh_yum_repo(mirror, data, mirror_url, ts):
    """ Refresh package metadata for a yum-style rpm mirror
        and add the packages to the mirror
    """
    primary_url, checksum, checksum_type = get_primary_url(mirror_url, data)

    if not primary_url:
        mirror.fail()
        return

    res = get_url(primary_url)
    mirror.last_access_ok = response_is_valid(res)
    if not mirror.last_access_ok:
        mirror.fail()
        return

    data = download_url(res, 'Downloading repo info (2/2):')
    if data is None:
        mirror.fail()
        return

    sha = get_sha(checksum_type, data)
    if sha is None:
        mirror.fail()
        return

    if not checksum_is_valid(sha, checksum, mirror):
        mirror.fail()
        return

    if mirror.file_checksum == checksum:
        text = 'Mirror checksum has not changed, '
        text += 'not refreshing package metadata'
        warning_message.send(sender=None, text=text)
        return

    mirror.file_checksum = checksum

    if hasattr(settings, 'MAX_MIRRORS') and \
            isinstance(settings.MAX_MIRRORS, int):
        max_mirrors = settings.MAX_MIRRORS
        # only refresh X mirrors, where X = max_mirrors
        checksum_q = Q(mirrorlist=False, refresh=True, timestamp=ts,
                       file_checksum=checksum)
        have_checksum = mirror.repo.mirror_set.filter(checksum_q).count()
        if have_checksum >= max_mirrors:
            text = '{0!s} mirrors already have this '.format(max_mirrors)
            text += 'checksum, ignoring refresh to save time'
            info_message.send(sender=None, text=text)
        else:
            packages = extract_yum_packages(data, primary_url)
            if packages:
                update_mirror_packages(mirror, packages)
def show(self):
    """ Show info about this mirror """
    text = [' %s : %s\n' % (self.id, self.url),
            ' last updated: %s checksum: %s\n' % (self.timestamp,
                                                  self.file_checksum)]
    for line in text:
        info_message.send(sender=None, text=line)
def process_update(self, package, highest_package):
    if self.host_repos_only:
        host_repos = Q(repo__host=self)
    else:
        host_repos = \
            Q(repo__osgroup__os__host=self, repo__arch=self.arch) | \
            Q(repo__host=self)
    mirrors = highest_package.mirror_set.filter(host_repos)
    security = False
    # If any of the containing repos are security,
    # mark the update as security
    for mirror in mirrors:
        if mirror.repo.security:
            security = True
    updates = PackageUpdate.objects.all()
    # see if any version of this update exists
    # if it's already marked as a security update, leave it that way
    # if not, mark it as a security update
    # this could be an issue if different distros mark the same update
    # in different ways (security vs bugfix) but in reality this is not
    # very likely to happen. if it does, we err on the side of caution
    # and mark it as the security update
    try:
        update = updates.get(oldpackage=package,
                             newpackage=highest_package)
    except PackageUpdate.DoesNotExist:
        update = None
    try:
        if update:
            if security and not update.security:
                update.security = True
                with transaction.atomic():
                    update.save()
        else:
            with transaction.atomic():
                update, c = updates.get_or_create(
                    oldpackage=package,
                    newpackage=highest_package,
                    security=security)
    except IntegrityError as e:
        error_message.send(sender=None, text=e)
        update = updates.get(oldpackage=package,
                             newpackage=highest_package,
                             security=security)
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
    try:
        with transaction.atomic():
            self.updates.add(update)
        info_message.send(sender=None, text='{0!s}'.format(update))
        return update.id
    except IntegrityError as e:
        error_message.send(sender=None, text=e)
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
def refresh_rpm_repo(repo):
    """ Refresh an rpm repo.
        Checks if the repo url is a mirrorlist, and extracts mirrors if so.
        If not, checks a number of common rpm repo formats to determine
        which type of repo it is, and to determine the mirror urls.
    """
    formats = [
        'repodata/repomd.xml.bz2',
        'repodata/repomd.xml.gz',
        'repodata/repomd.xml',
        'suse/repodata/repomd.xml.bz2',
        'suse/repodata/repomd.xml.gz',
        'suse/repodata/repomd.xml',
        'content',
    ]

    if lzma is not None:
        formats.insert(0, 'repodata/repomd.xml.xz')
        formats.insert(4, 'suse/repodata/repomd.xml.xz')

    check_for_mirrorlists(repo)
    check_for_metalinks(repo)

    # MAX_MIRRORS may not be configured; only cap refreshes when it is
    max_mirrors = None
    if hasattr(settings, 'MAX_MIRRORS') and \
            isinstance(settings.MAX_MIRRORS, int):
        max_mirrors = settings.MAX_MIRRORS

    ts = datetime.now().replace(microsecond=0)
    enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True)
    for i, mirror in enumerate(enabled_mirrors):
        res = find_mirror_url(mirror.url, formats)
        mirror.last_access_ok = response_is_valid(res)
        if mirror.last_access_ok:
            if max_mirrors is not None and i >= max_mirrors:
                text = '{0!s} mirrors already refreshed, '.format(max_mirrors)
                text += ' not refreshing {0!s}'.format(mirror.url)
                warning_message.send(sender=None, text=text)
                continue
            data = download_url(res, 'Downloading repo info (1/2):')
            if data is None:
                mirror.fail()
                return
            mirror_url = res.url
            if res.url.endswith('content'):
                text = 'Found yast rpm repo - {0!s}'.format(mirror_url)
                info_message.send(sender=None, text=text)
                refresh_yast_repo(mirror, data)
            else:
                text = 'Found yum rpm repo - {0!s}'.format(mirror_url)
                info_message.send(sender=None, text=text)
                refresh_yum_repo(mirror, data, mirror_url, ts)
            mirror.timestamp = ts
        else:
            mirror.fail()
        mirror.save()
def show(self):
    """ Show info about this repo, including mirrors """
    text = '{0!s} : {1!s}\n'.format(self.id, self.name)
    text += 'security: {0!s} '.format(self.security)
    text += 'arch: {0!s}\n'.format(self.arch)
    text += 'Mirrors:'
    info_message.send(sender=None, text=text)
    for mirror in self.mirror_set.all():
        mirror.show()
def check_for_mirrorlists(repo):
    """ Check if any of the mirrors are actually mirrorlists.
        Creates MAX_MIRRORS mirrors from list if so.
    """
    for mirror in repo.mirror_set.all():
        mirror_urls = get_mirrorlist_urls(mirror.url)
        if mirror_urls:
            mirror.mirrorlist = True
            mirror.last_access_ok = True
            mirror.save()
            text = 'Found mirrorlist - {0!s}'.format(mirror.url)
            info_message.send(sender=None, text=text)
            add_mirrors_from_urls(mirror, mirror_urls)
def show(self):
    """ Show info about this repo, including mirrors """
    text = ['%s : %s\n' % (self.id, self.name),
            'security: %s arch: %s\n' % (self.security, self.arch),
            'Mirrors:\n']
    for line in text:
        info_message.send(sender=None, text=line)
    for mirror in self.mirror_set.all():
        mirror.show()
    info_message.send(sender=None, text='\n')
def refresh_rpm_repo(repo):
    """ Refresh an rpm repo.
        Checks if the repo url is a mirrorlist, and extracts mirrors if so.
        If not, checks a number of common rpm repo formats to determine
        which type of repo it is, and to determine the mirror urls.
    """
    formats = [
        'repodata/repomd.xml.bz2',
        'repodata/repomd.xml.gz',
        'repodata/repomd.xml',
        'suse/repodata/repomd.xml.bz2',
        'suse/repodata/repomd.xml.gz',
        'suse/repodata/repomd.xml',
        'content',
    ]

    if lzma is not None:
        formats.insert(0, 'repodata/repomd.xml.xz')
        formats.insert(4, 'suse/repodata/repomd.xml.xz')

    check_for_mirrorlists(repo)
    check_for_metalinks(repo)

    ts = datetime.now().replace(microsecond=0)
    for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True):
        res = find_mirror_url(mirror.url, formats)
        mirror.last_access_ok = response_is_valid(res)
        if mirror.last_access_ok:
            data = download_url(res, 'Downloading repo info (1/2):')
            if data is None:
                mirror.fail()
                return
            mirror_url = res.url
            if res.url.endswith('content'):
                text = 'Found yast rpm repo - {0!s}'.format(mirror_url)
                info_message.send(sender=None, text=text)
                refresh_yast_repo(mirror, data)
            else:
                text = 'Found yum rpm repo - {0!s}'.format(mirror_url)
                info_message.send(sender=None, text=text)
                refresh_yum_repo(mirror, data, mirror_url, ts)
            mirror.timestamp = ts
        else:
            mirror.fail()
        mirror.save()
def process_update(self, package, highest, highestpackage):
    if highest != ('', '0', ''):
        if self.host_repos_only:
            hostrepos = Q(repo__host=self)
        else:
            hostrepos = Q(repo__osgroup__os__host=self,
                          repo__arch=self.arch) | Q(repo__host=self)
        matchingrepos = highestpackage.mirror_set.filter(hostrepos)
        security = False
        # If any of the containing repos are security,
        # mark the update as security
        for mirror in matchingrepos:
            if mirror.repo.security:
                security = True
        update, c = PackageUpdate.objects.get_or_create(
            oldpackage=package,
            newpackage=highestpackage,
            security=security)
        self.updates.add(update)
        info_message.send(sender=None, text="%s\n" % update)
def extract_arch_packages(data):
    """ Extract package metadata from an arch linux tarfile """
    from packages.utils import find_evr
    extracted = BytesIO(extract(data, 'gz'))
    tf = tarfile.open(fileobj=extracted, mode='r:*')
    packages = set()
    plen = len(tf.getnames())
    if plen > 0:
        ptext = 'Extracting packages: '
        progress_info_s.send(sender=None, ptext=ptext, plen=plen)
        for i, tarinfo in enumerate(tf):
            progress_update_s.send(sender=None, index=i + 1)
            if tarinfo.isfile():
                name_sec = ver_sec = arch_sec = False
                t = tf.extractfile(tarinfo).read()
                for line in t.decode('utf-8').splitlines():
                    if line.startswith('%NAME%'):
                        name_sec = True
                        continue
                    if name_sec:
                        name_sec = False
                        name = line
                        continue
                    if line.startswith('%VERSION%'):
                        ver_sec = True
                        continue
                    if ver_sec:
                        ver_sec = False
                        epoch, version, release = find_evr(line)
                        continue
                    if line.startswith('%ARCH%'):
                        arch_sec = True
                        continue
                    if arch_sec:
                        arch_sec = False
                        arch = line
                        continue
                package = PackageString(name=name.lower(),
                                        epoch=epoch,
                                        version=version,
                                        release=release,
                                        arch=arch,
                                        packagetype='A')
                packages.add(package)
    else:
        info_message.send(sender=None, text='No packages found in repo')
    return packages
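# Minimal sketch (sample data, not taken from any real Arch repository) of the
# desc entry layout inside an Arch .db tarball that extract_arch_packages
# walks: each %SECTION% header line is followed by its value on the next line,
# and the %VERSION% value may carry an epoch prefix and a pkgrel suffix.
SAMPLE_ARCH_DESC = '''\
%NAME%
zlib

%VERSION%
1:1.2.13-2

%ARCH%
x86_64
'''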
def check_for_metalinks(repo):
    """ Checks a set of mirrors for metalinks and creates
        MAX_MIRRORS mirrors if so.
    """
    for mirror in repo.mirror_set.all():
        if is_metalink(mirror.url):
            mirror_urls = get_metalink_urls(mirror.url)
        else:
            continue
        if mirror_urls:
            mirror.mirrorlist = True
            mirror.last_access_ok = True
            mirror.save()
            text = 'Found metalink - {0!s}'.format(mirror.url)
            info_message.send(sender=None, text=text)
            add_mirrors_from_urls(mirror, mirror_urls)
def extract_yum_packages(data, url):
    """ Extract package metadata from a yum primary.xml file """
    extracted = extract(data, url)
    ns = 'http://linux.duke.edu/metadata/common'
    context = etree.iterparse(BytesIO(extracted),
                              tag='{{{0!s}}}metadata'.format(ns))
    plen = int(next(context)[1].get('packages'))
    context = etree.iterparse(BytesIO(extracted),
                              tag='{{{0!s}}}package'.format(ns))
    packages = set()
    if plen > 0:
        ptext = 'Extracting packages: '
        progress_info_s.send(sender=None, ptext=ptext, plen=plen)
        for i, data in enumerate(context):
            elem = data[1]
            progress_update_s.send(sender=None, index=i + 1)
            name = elem.xpath('//ns:name',
                              namespaces={'ns': ns})[0].text.lower()
            arch = elem.xpath('//ns:arch',
                              namespaces={'ns': ns})[0].text
            fullversion = elem.xpath('//ns:version',
                                     namespaces={'ns': ns})[0]
            epoch = fullversion.get('epoch')
            version = fullversion.get('ver')
            release = fullversion.get('rel')
            elem.clear()
            while elem.getprevious() is not None:
                del elem.getparent()[0]
            if name != '' and version != '' and arch != '':
                if epoch == '0':
                    epoch = ''
                package = PackageString(name=name,
                                        epoch=epoch,
                                        version=version,
                                        release=release,
                                        arch=arch,
                                        packagetype='R')
                packages.add(package)
    else:
        info_message.send(sender=None, text='No packages found in repo')
    return packages
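# A minimal, self-contained sketch (sample data, not from any real repo)
# showing the primary.xml structure that extract_yum_packages iterates over
# with lxml's iterparse; assumes lxml is installed.
from io import BytesIO
from lxml import etree

SAMPLE_PRIMARY_XML = b'''<?xml version="1.0" encoding="UTF-8"?>
<metadata xmlns="http://linux.duke.edu/metadata/common" packages="1">
  <package type="rpm">
    <name>zlib</name>
    <arch>x86_64</arch>
    <version epoch="0" ver="1.2.11" rel="3"/>
  </package>
</metadata>'''

sample_ns = 'http://linux.duke.edu/metadata/common'
for _, elem in etree.iterparse(BytesIO(SAMPLE_PRIMARY_XML),
                               tag='{{{0!s}}}package'.format(sample_ns)):
    # each <package> element carries name, arch and an epoch/ver/rel triple
    name = elem.findtext('{{{0!s}}}name'.format(sample_ns))
    arch = elem.findtext('{{{0!s}}}arch'.format(sample_ns))
    version = elem.find('{{{0!s}}}version'.format(sample_ns))
    print(name, arch, version.get('epoch'),
          version.get('ver'), version.get('rel'))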
def mirrorlists_check(repo):
    """ Check if any of the mirrors are actually mirrorlists """
    for mirror in repo.mirror_set.all():
        mirror_urls = mirrorlist_check(mirror.url)
        if mirror_urls:
            mirror.mirrorlist = True
            mirror.last_access_ok = True
            mirror.save()
            info_message.send(sender=None,
                              text='Found mirrorlist - %s\n' % mirror.url)
            for mirror_url in mirror_urls:
                mirror_url = mirror_url.replace('$ARCH', repo.arch.name)
                mirror_url = mirror_url.replace('$basearch', repo.arch.name)
                from patchman.repos.models import Mirror
                new_mirror, c = Mirror.objects.get_or_create(repo=repo,
                                                             url=mirror_url)
                if c:
                    info_message.send(sender=None,
                                      text='Added mirror - %s\n' % mirror_url)
def update(self, force=False):
    """ Update all of a repo's mirror metadata,
        force can be set to force a reset of all the mirrors' metadata
    """
    if force:
        for mirror in self.mirror_set.all():
            mirror.file_checksum = None
            mirror.save()

    if not self.auth_required:
        if self.repotype == Repository.DEB:
            update_deb_repo(self)
        elif self.repotype == Repository.RPM:
            update_rpm_repo(self)
        else:
            error_message.send(sender=None,
                               text='Error: unknown repo type for repo '
                                    '%s: %s\n' % (self.id, self.repotype))
    else:
        info_message.send(sender=None,
                          text='Repo requires certificate authentication, '
                               'not updating\n')
def refresh_rpm_repo(repo):
    """ Refresh an rpm repo.
        Checks if the repo url is a mirrorlist, and extracts mirrors if so.
        If not, checks a number of common rpm repo formats to determine
        which type of repo it is, and to determine the mirror urls.
    """
    formats = [
        'repodata/repomd.xml.bz2',
        'repodata/repomd.xml.gz',
        'repodata/repomd.xml',
        'suse/repodata/repomd.xml.bz2',
        'suse/repodata/repomd.xml.gz',
        'suse/repodata/repomd.xml',
        'content'
    ]

    mirrorlists_check(repo)
    ts = datetime.now().replace(microsecond=0)

    for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True):
        repo_url, res, yast = find_mirror_url(mirror.url, formats)
        mirror.last_access_ok = check_response(res)
        if mirror.last_access_ok:
            data = download_url(res, 'Downloading repo info (1/2):')
            if data is None:
                mirror.fail()
                return
            if not yast:
                text = 'Found yum rpm repo - %s\n' % repo_url
                info_message.send(sender=None, text=text)
                refresh_yum_repo(mirror, data, repo_url, ts)
            else:
                text = 'Found yast rpm repo - %s\n' % repo_url
                info_message.send(sender=None, text=text)
                refresh_yast_repo(mirror, data, repo_url)
            mirror.timestamp = ts
        else:
            mirror.fail()
        mirror.save()
def show(self):
    """ Show info about this host """
    text = '{0!s}:\n'.format(self)
    text += 'IP address   : {0!s}\n'.format(self.ipaddress)
    text += 'Reverse DNS  : {0!s}\n'.format(self.reversedns)
    text += 'Domain       : {0!s}\n'.format(self.domain)
    text += 'OS           : {0!s}\n'.format(self.os)
    text += 'Kernel       : {0!s}\n'.format(self.kernel)
    text += 'Architecture : {0!s}\n'.format(self.arch)
    text += 'Last report  : {0!s}\n'.format(self.lastreport)
    text += 'Packages     : {0!s}\n'.format(self.get_num_packages())
    text += 'Repos        : {0!s}\n'.format(self.get_num_repos())
    text += 'Updates      : {0!s}\n'.format(self.get_num_updates())
    text += 'Tags         : {0!s}\n'.format(self.tags)
    text += 'Needs reboot : {0!s}\n'.format(self.reboot_required)
    text += 'Updated at   : {0!s}\n'.format(self.updated_at)
    text += 'Host repos   : {0!s}\n'.format(self.host_repos_only)
    info_message.send(sender=None, text=text)
def refresh_yum_repo(mirror, data, repo_url, ts):
    """ Refresh package metadata for a yum-style rpm mirror
        and add the packages to the mirror
    """
    primary_url, checksum, checksum_type = get_primary_url(repo_url, data)

    if not primary_url:
        mirror.fail()
        return

    res = get_url(primary_url)
    mirror.last_access_ok = response_is_valid(res)

    if mirror.last_access_ok:
        data = download_url(res, 'Downloading repo info (2/2):')
        if data is None:
            mirror.fail()
            return
        valid = checksum_is_valid(mirror, checksum, checksum_type, data)
        if valid:
            mirror.file_checksum = checksum
        else:
            mirror.fail()
            return
        if hasattr(settings, 'MAX_MIRRORS') and \
                isinstance(settings.MAX_MIRRORS, int):
            max_mirrors = settings.MAX_MIRRORS
            # only refresh X mirrors, where X = max_mirrors
            checksum_q = Q(mirrorlist=False, refresh=True, timestamp=ts,
                           file_checksum=checksum)
            have_checksum = mirror.repo.mirror_set.filter(checksum_q).count()
            if have_checksum >= max_mirrors:
                text = '%s mirrors already have this checksum, ' % max_mirrors
                text += 'ignoring refresh to save time\n'
                info_message.send(sender=None, text=text)
            else:
                packages = extract_yum_packages(data)
                if packages:
                    update_mirror_packages(mirror, packages)
    else:
        mirror.fail()
def mirrorlists_check(repo):
    """ Check if any of the mirrors are actually mirrorlists """
    for mirror in repo.mirror_set.all():
        mirror_urls = mirrorlist_check(mirror.url)
        if mirror_urls:
            mirror.mirrorlist = True
            mirror.last_access_ok = True
            text = 'Found mirrorlist - %s\n' % mirror.url
            info_message.send(sender=None, text=text)
            for mirror_url in mirror_urls:
                mirror_url = mirror_url.replace('$ARCH', repo.arch.name)
                mirror_url = mirror_url.replace('$basearch', repo.arch.name)
                if hasattr(settings, 'MAX_MIRRORS') and \
                        type(settings.MAX_MIRRORS) == int:
                    max_mirrors = settings.MAX_MIRRORS
                    # only add X mirrors, where X = max_mirrors
                    q = Q(mirrorlist=False, refresh=True)
                    existing = mirror.repo.mirror_set.filter(q).count()
                    if existing >= max_mirrors:
                        text = '%s mirrors already exist, not adding %s\n' \
                            % (max_mirrors, mirror_url)
                        info_message.send(sender=None, text=text)
                        continue
                from patchman.repos.models import Mirror
                m, c = Mirror.objects.get_or_create(repo=repo,
                                                    url=mirror_url)
                if c:
                    text = 'Added mirror - %s\n' % mirror_url
                    info_message.send(sender=None, text=text)
def extract_deb_packages(data, url):
    """ Extract package metadata from debian Packages file """
    extracted = extract(data, url)
    package_re = re.compile(b'^Package: ', re.M)
    plen = len(package_re.findall(extracted))
    packages = set()
    if plen > 0:
        ptext = 'Extracting packages: '
        progress_info_s.send(sender=None, ptext=ptext, plen=plen)
        bio = BytesIO(extracted)
        for i, stanza in enumerate(Packages.iter_paragraphs(bio)):
            # https://github.com/furlongm/patchman/issues/55
            if 'version' not in stanza:
                continue
            fullversion = Version(stanza['version'])
            arch = stanza['architecture']
            name = stanza['package']
            epoch = fullversion._BaseVersion__epoch
            if epoch is None:
                epoch = ''
            version = fullversion._BaseVersion__upstream_version
            release = fullversion._BaseVersion__debian_revision
            if release is None:
                release = ''
            progress_update_s.send(sender=None, index=i + 1)
            package = PackageString(name=name,
                                    epoch=epoch,
                                    version=version,
                                    release=release,
                                    arch=arch,
                                    packagetype='D')
            packages.add(package)
    else:
        info_message.send(sender=None, text='No packages found in repo')
    return packages
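# A minimal, self-contained sketch (not part of the functions above) showing
# the public python-debian accessors that correspond to the name-mangled
# _BaseVersion__* attributes used in extract_deb_packages; assumes the
# python-debian package is installed.
from debian.debian_support import Version as DebVersion

v = DebVersion('1:2.30.2-1ubuntu1')
# epoch -> '1', upstream_version -> '2.30.2', debian_revision -> '1ubuntu1'
print(v.epoch, v.upstream_version, v.debian_revision)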
def refresh_arch_repo(repo):
    """ Refresh all mirrors of an arch linux repo """
    # MAX_MIRRORS may not be configured; only cap refreshes when it is
    max_mirrors = None
    if hasattr(settings, 'MAX_MIRRORS') and \
            isinstance(settings.MAX_MIRRORS, int):
        max_mirrors = settings.MAX_MIRRORS

    fname = '{0!s}/{1!s}.db'.format(repo.arch, repo.repo_id)
    ts = datetime.now().replace(microsecond=0)
    for i, mirror in enumerate(repo.mirror_set.filter(refresh=True)):
        res = find_mirror_url(mirror.url, [fname])
        mirror.last_access_ok = response_is_valid(res)
        if mirror.last_access_ok:
            if max_mirrors is not None and i >= max_mirrors:
                text = '{0!s} mirrors already refreshed, '.format(max_mirrors)
                text += ' not refreshing {0!s}'.format(mirror.url)
                warning_message.send(sender=None, text=text)
                continue
            mirror_url = res.url
            text = 'Found arch repo - {0!s}'.format(mirror_url)
            info_message.send(sender=None, text=text)
            data = download_url(res, 'Downloading repo info:')
            if data is None:
                mirror.fail()
                return
            computed_checksum = get_checksum(data, Checksum.sha1)
            if mirror.file_checksum == computed_checksum:
                text = 'Mirror checksum has not changed, '
                text += 'not refreshing package metadata'
                warning_message.send(sender=None, text=text)
            else:
                packages = extract_arch_packages(data)
                mirror.last_access_ok = True
                mirror.timestamp = ts
                update_mirror_packages(mirror, packages)
                mirror.file_checksum = computed_checksum
                packages.clear()
        else:
            mirror.fail()
        mirror.save()
def add_mirrors_from_urls(repo, mirror_urls):
    """ Creates mirrors from a list of mirror urls """
    for mirror_url in mirror_urls:
        mirror_url = mirror_url.replace('$ARCH', repo.arch.name)
        mirror_url = mirror_url.replace('$basearch', repo.arch.name)
        if hasattr(settings, 'MAX_MIRRORS') and \
                isinstance(settings.MAX_MIRRORS, int):
            max_mirrors = settings.MAX_MIRRORS
            # only add X mirrors, where X = max_mirrors
            q = Q(mirrorlist=False, refresh=True)
            existing = repo.mirror_set.filter(q).count()
            if existing >= max_mirrors:
                text = '{0!s} mirrors already '.format(max_mirrors)
                text += 'exist, not adding {0!s}'.format(mirror_url)
                warning_message.send(sender=None, text=text)
                continue
        from repos.models import Mirror
        m, c = Mirror.objects.get_or_create(repo=repo, url=mirror_url)
        if c:
            text = 'Added mirror - {0!s}'.format(mirror_url)
            info_message.send(sender=None, text=text)
def refresh_deb_repo(repo):
    """ Refresh a debian repo.
        Checks for the Packages* files to determine what the mirror urls
        are and then downloads and extracts packages from those files.
    """
    formats = ['Packages.bz2', 'Packages.gz', 'Packages']
    if lzma is not None:
        formats.insert(0, 'Packages.xz')

    ts = datetime.now().replace(microsecond=0)
    for mirror in repo.mirror_set.filter(refresh=True):
        res = find_mirror_url(mirror.url, formats)
        mirror.last_access_ok = response_is_valid(res)
        if mirror.last_access_ok:
            mirror_url = res.url
            text = 'Found deb repo - {0!s}'.format(mirror_url)
            info_message.send(sender=None, text=text)
            data = download_url(res, 'Downloading repo info:')
            if data is None:
                mirror.fail()
                return
            computed_checksum = get_checksum(data, Checksum.sha1)
            if mirror.file_checksum == computed_checksum:
                text = 'Mirror checksum has not changed, '
                text += 'not refreshing package metadata'
                warning_message.send(sender=None, text=text)
            else:
                packages = extract_deb_packages(data, mirror_url)
                mirror.last_access_ok = True
                mirror.timestamp = ts
                update_mirror_packages(mirror, packages)
                mirror.file_checksum = computed_checksum
                packages.clear()
        else:
            mirror.fail()
        mirror.save()
def add_mirrors_from_urls(mirror, mirror_urls):
    """ Creates mirrors from a list of mirror urls """
    for mirror_url in mirror_urls:
        mirror_url = mirror_url.decode('ascii')
        mirror_url = mirror_url.replace('$ARCH', mirror.repo.arch.name)
        mirror_url = mirror_url.replace('$basearch', mirror.repo.arch.name)
        if hasattr(settings, 'MAX_MIRRORS') and \
                isinstance(settings.MAX_MIRRORS, int):
            max_mirrors = settings.MAX_MIRRORS
            # only add X mirrors, where X = max_mirrors
            q = Q(mirrorlist=False, refresh=True)
            existing = mirror.repo.mirror_set.filter(q).count()
            if existing >= max_mirrors:
                text = '{0!s} mirrors already '.format(max_mirrors)
                text += 'exist, not adding {0!s}'.format(mirror_url)
                warning_message.send(sender=None, text=text)
                continue
        from patchman.repos.models import Mirror
        m, c = Mirror.objects.get_or_create(repo=mirror.repo,
                                            url=mirror_url)
        if c:
            text = 'Added mirror - {0!s}'.format(mirror_url)
            info_message.send(sender=None, text=text)
def show(self):
    """ Show info about this host """
    text = []
    text.append('%s:\n' % self)
    text.append('IP address   : %s\n' % self.ipaddress)
    text.append('Reverse DNS  : %s\n' % self.reversedns)
    text.append('Domain       : %s\n' % self.domain)
    text.append('OS           : %s\n' % self.os)
    text.append('Kernel       : %s\n' % self.kernel)
    text.append('Architecture : %s\n' % self.arch)
    text.append('Last report  : %s\n' % self.lastreport)
    text.append('Packages     : %s\n' % self.get_num_packages())
    text.append('Repos        : %s\n' % self.get_num_repos())
    text.append('Updates      : %s\n' % self.get_num_updates())
    text.append('Tags         : %s\n' % self.tags)
    text.append('Needs reboot : %s\n' % self.reboot_required)
    text.append('Updated at   : %s\n' % self.updated_at)
    text.append('Host repos   : %s\n' % self.host_repos_only)
    text.append('\n')
    for line in text:
        info_message.send(sender=None, text=line)
def process_update(self, package, highest_package):
    if self.host_repos_only:
        host_repos = Q(repo__host=self)
    else:
        host_repos = \
            Q(repo__osgroup__os__host=self, repo__arch=self.arch) | \
            Q(repo__host=self)
    mirrors = highest_package.mirror_set.filter(host_repos)
    security = False
    # If any of the containing repos are security,
    # mark the update as security
    for mirror in mirrors:
        if mirror.repo.security:
            security = True
    try:
        updates = PackageUpdate.objects.all()
        with transaction.atomic():
            update, c = updates.get_or_create(oldpackage=package,
                                              newpackage=highest_package,
                                              security=security)
    except IntegrityError as e:
        print(e)
        update = updates.get(oldpackage=package,
                             newpackage=highest_package,
                             security=security)
    except DatabaseError as e:
        print(e)
    try:
        with transaction.atomic():
            self.updates.add(update)
        info_message.send(sender=None, text="%s\n" % update)
        return update.id
    except IntegrityError as e:
        print(e)
    except DatabaseError as e:
        print(e)
def check_rdns(self):
    if self.check_dns:
        update_rdns(self)
        if self.hostname == self.reversedns:
            info_message.send(sender=None, text='Reverse DNS matches.\n')
        else:
            info_message.send(sender=None,
                              text='Reverse DNS mismatch found: %s != %s\n'
                              % (self.hostname, self.reversedns))
    else:
        info_message.send(sender=None,
                          text='Reverse DNS check disabled for this host.\n')
def refresh_deb_repo(repo):
    """ Refresh a debian repo.
        Checks for the Packages* files to determine what the mirror urls
        are and then downloads and extracts packages from those files.
    """
    formats = ['Packages.bz2', 'Packages.gz', 'Packages']
    if lzma is not None:
        formats.insert(0, 'Packages.xz')

    for mirror in repo.mirror_set.filter(refresh=True):
        res = find_mirror_url(mirror.url, formats)
        mirror.last_access_ok = response_is_valid(res)
        if mirror.last_access_ok:
            mirror_url = res.url
            text = 'Found deb repo - {0!s}'.format(mirror_url)
            info_message.send(sender=None, text=text)
            data = download_url(res, 'Downloading repo info:')
            if data is None:
                mirror.fail()
                return
            sha1 = get_sha1(data)
            if mirror.file_checksum == sha1:
                text = 'Mirror checksum has not changed, '
                text += 'not refreshing package metadata'
                warning_message.send(sender=None, text=text)
            else:
                packages = extract_deb_packages(data, mirror_url)
                mirror.last_access_ok = True
                mirror.timestamp = datetime.now()
                update_mirror_packages(mirror, packages)
                mirror.file_checksum = sha1
                packages.clear()
        else:
            mirror.fail()
        mirror.save()
def process_update(self, package, highest_package):
    if self.host_repos_only:
        host_repos = Q(repo__host=self)
    else:
        host_repos = \
            Q(repo__osgroup__os__host=self, repo__arch=self.arch) | \
            Q(repo__host=self)
    mirrors = highest_package.mirror_set.filter(host_repos)
    security = False
    # If any of the containing repos are security,
    # mark the update as security
    for mirror in mirrors:
        if mirror.repo.security:
            security = True
    try:
        updates = PackageUpdate.objects.all()
        with transaction.atomic():
            update, c = updates.get_or_create(oldpackage=package,
                                              newpackage=highest_package,
                                              security=security)
    except IntegrityError as e:
        error_message.send(sender=None, text=e)
        update = updates.get(oldpackage=package,
                             newpackage=highest_package,
                             security=security)
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
    try:
        with transaction.atomic():
            self.updates.add(update)
        info_message.send(sender=None, text='{0!s}'.format(update))
        return update.id
    except IntegrityError as e:
        error_message.send(sender=None, text=e)
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
def check_rdns(self):
    if self.check_dns:
        update_rdns(self)
        if self.hostname.lower() == self.reversedns.lower():
            info_message.send(sender=None, text='Reverse DNS matches')
        else:
            text = 'Reverse DNS mismatch found: '
            text += '{0!s} != {1!s}'.format(self.hostname, self.reversedns)
            info_message.send(sender=None, text=text)
    else:
        info_message.send(sender=None, text='Reverse DNS check disabled')
def check_rdns(self):
    if self.check_dns:
        update_rdns(self)
        if self.hostname == self.reversedns:
            info_message.send(sender=None, text='Reverse DNS matches')
        else:
            text = 'Reverse DNS mismatch found: '
            text += '{0!s} != {1!s}'.format(self.hostname, self.reversedns)
            info_message.send(sender=None, text=text)
    else:
        info_message.send(sender=None, text='Reverse DNS check disabled')
def process(self, find_updates=True, verbose=False):
    """ Process a report and extract os, arch, domain, packages, repos etc
    """
    if self.os and self.kernel and self.arch and not self.processed:
        oses = OS.objects.all()
        with transaction.atomic():
            os, c = oses.get_or_create(name=self.os)

        machine_arches = MachineArchitecture.objects.all()
        with transaction.atomic():
            arch, c = machine_arches.get_or_create(name=self.arch)

        if not self.domain:
            self.domain = 'unknown'
        domains = Domain.objects.all()
        with transaction.atomic():
            domain, c = domains.get_or_create(name=self.domain)

        if not self.host:
            try:
                self.host = str(gethostbyaddr(self.report_ip)[0])
            except herror:
                self.host = self.report_ip

        hosts = Host.objects.all()
        with transaction.atomic():
            host, c = hosts.get_or_create(
                hostname=self.host,
                defaults={
                    'ipaddress': self.report_ip,
                    'arch': arch,
                    'os': os,
                    'domain': domain,
                    'lastreport': self.created,
                })
        host.ipaddress = self.report_ip
        host.kernel = self.kernel
        host.arch = arch
        host.os = os
        host.domain = domain
        host.lastreport = self.created
        host.tags = self.tags
        if self.reboot == 'True':
            host.reboot_required = True
        else:
            host.reboot_required = False
        try:
            with transaction.atomic():
                host.save()
        except IntegrityError as e:
            error_message.send(sender=None, text=e)
        except DatabaseError as e:
            error_message.send(sender=None, text=e)
        host.check_rdns()

        if verbose:
            text = 'Processing report '
            text += '{0!s} - {1!s}'.format(self.id, self.host)
            info_message.send(sender=None, text=text)

        from reports.utils import process_packages, \
            process_repos, process_updates
        with transaction.atomic():
            process_repos(report=self, host=host)
        with transaction.atomic():
            process_packages(report=self, host=host)
        with transaction.atomic():
            process_updates(report=self, host=host)
        self.processed = True
        with transaction.atomic():
            self.save()

        if find_updates:
            if verbose:
                text = 'Finding updates for report '
                text += '{0!s} - {1!s}'.format(self.id, self.host)
                info_message.send(sender=None, text=text)
            host.find_updates()
    else:
        if self.processed:
            text = 'Report {0!s} '.format(self.id)
            text += 'has already been processed'
            info_message.send(sender=None, text=text)
        else:
            text = 'Error: OS, kernel or arch not sent '
            text += 'with report {0!s}'.format(self.id)
            error_message.send(sender=None, text=text)