def process_packages(report, host):
    """ Processes the quoted packages string sent with a report

    Adds each package found in the report to the host, then removes
    any of the host's previously recorded packages that no longer
    appear in the report.
    """
    if report.packages:
        # snapshot of the host's current packages, used at the end to
        # work out which packages have been removed from the host
        old_packages = host.packages.all()
        package_ids = []
        packages = parse_packages(report.packages)
        progress_info_s.send(sender=None,
                             ptext='{0!s} packages'.format(str(host)[0:25]),
                             plen=len(packages))
        for i, pkg_str in enumerate(packages):
            package = process_package(pkg_str, report.protocol)
            if package:
                package_ids.append(package.id)
                try:
                    with transaction.atomic():
                        host.packages.add(package)
                except IntegrityError as e:
                    error_message.send(sender=None, text=e)
                except DatabaseError as e:
                    error_message.send(sender=None, text=e)
            else:
                # gpg-pubkey pseudo packages are deliberately skipped by
                # process_package, so only report other failures
                if pkg_str[0].lower() != 'gpg-pubkey':
                    text = 'No package returned for {0!s}'.format(pkg_str)
                    info_message.send(sender=None, text=text)
            progress_update_s.send(sender=None, index=i + 1)
        # any previously known package not in this report was removed
        removals = old_packages.exclude(pk__in=package_ids)
        for package in removals:
            host.packages.remove(package)
def checksum_is_valid(mirror, checksum, checksum_type, data):
    """ Check the checksum of the data.

    Returns True if the checksum is valid and has changed since the
    last refresh. Returns False if the checksum is invalid, if the
    checksum type is unknown, or if the checksum has not changed.
    """
    if checksum_type == 'sha':
        sha = get_sha1(data)
    elif checksum_type == 'sha256':
        sha = get_sha256(data)
    else:
        # previously this fell through with `sha` unbound and raised
        # UnboundLocalError on the comparison below; bail out instead
        text = 'Unknown checksum type: %s\n' % checksum_type
        error_message.send(sender=None, text=text)
        return False
    if sha != checksum:
        text = '%s checksum failed for mirror %s' % (checksum_type, mirror.id)
        text += ', not refreshing package metadata\n'
        error_message.send(sender=None, text=text)
        mirror.last_access_ok = False
        return False
    elif mirror.file_checksum == sha:
        text = 'Mirror checksum has not changed, '
        text += 'not refreshing package metadata\n'
        info_message.send(sender=None, text=text)
        return False
    return True
def parse_errata_children(e, children):
    """ Parse errata children to obtain architecture, release and packages

    e is the Erratum object being populated; children are the parsed
    XML child elements of the erratum.
    """
    for c in children:
        if c.tag == 'os_arch':
            m_arches = MachineArchitecture.objects.all()
            with transaction.atomic():
                # don't reuse `c` for the created flag - that clobbered
                # the element currently being iterated
                m_arch, created = m_arches.get_or_create(name=c.text)
            e.arches.add(m_arch)
        elif c.tag == 'os_release':
            from operatingsystems.models import OSGroup
            osgroups = OSGroup.objects.all()
            osgroup_name = 'CentOS {0!s}'.format(c.text)
            with transaction.atomic():
                osgroup, created = osgroups.get_or_create(name=osgroup_name)
            e.releases.add(osgroup)
        elif c.tag == 'packages':
            pkg_str = c.text.replace('.rpm', '')
            pkg_re = re.compile(r'(\S+)-(?:(\d*):)?(.*)-(~?\w+)[.+]?(~?\S+)?\.(\S+)$')  # noqa
            m = pkg_re.match(pkg_str)
            if m:
                name, epoch, ver, rel, dist, arch = m.groups()
            else:
                # don't reuse `e` for the error text - that clobbered the
                # Erratum object and broke e.packages.add() on subsequent
                # iterations after the first parse failure
                text = 'Error parsing errata: '
                text += 'could not parse package "{0!s}"'.format(pkg_str)
                error_message.send(sender=None, text=text)
                continue
            if dist:
                rel = '{0!s}.{1!s}'.format(rel, dist)
            p_type = Package.RPM
            pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type)
            e.packages.add(pkg)
def find_kernel_updates(self, kernel_packages, repo_packages):
    """ Find updates for the host's kernel packages.

    For each installed kernel package, determines the highest version
    available in the repos and the highest version already installed,
    and records an update between the two when the repos are newer.
    Returns the list of update ids created.
    """
    update_ids = []
    for package in kernel_packages:
        host_highest = package
        repo_highest = package
        pk_q = Q(name=package.name)
        potential_updates = repo_packages.filter(pk_q)
        # find the highest version available in the repos
        for pu in potential_updates:
            if package.compare_version(pu) == -1 \
                    and repo_highest.compare_version(pu) == -1:
                repo_highest = pu
        host_packages = self.packages.filter(pk_q)
        # find the highest version already installed on the host
        for hp in host_packages:
            if package.compare_version(hp) == -1 and \
                    host_highest.compare_version(hp) == -1:
                host_highest = hp
        # record an update only if the repos have something newer
        if host_highest.compare_version(repo_highest) == -1:
            uid = self.process_update(host_highest, repo_highest)
            if uid is not None:
                update_ids.append(uid)
        self.check_if_reboot_required(host_highest)
    try:
        with transaction.atomic():
            self.save()
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
    return update_ids
def mark_errata_security_updates():
    """ For each set of erratum packages, modify any PackageUpdate
    that should be marked as a security update.
    """
    package_updates = PackageUpdate.objects.all()
    errata = Erratum.objects.all()
    elen = Erratum.objects.count()
    ptext = 'Scanning {0!s} Errata:'.format(elen)
    progress_info_s.send(sender=None, ptext=ptext, plen=elen)
    for i, erratum in enumerate(errata):
        progress_update_s.send(sender=None, index=i + 1)
        if erratum.etype == 'security':
            for package in erratum.packages.all():
                # updates to this package not yet flagged as security
                affected_updates = package_updates.filter(
                    newpackage=package,
                    security=False
                )
                for affected_update in affected_updates:
                    if not affected_update.security:
                        affected_update.security = True
                        try:
                            with transaction.atomic():
                                affected_update.save()
                        except IntegrityError as e:
                            error_message.send(sender=None, text=e)
                            # a version of this update already exists that is
                            # marked as a security update, so delete this one
                            affected_update.delete()
def process_packages(report, host):
    """ Processes the quoted packages string sent with a report

    Adds each package found in the report to the host, then removes
    any host package that did not appear in this report.
    """
    if report.packages:
        package_ids = []
        packages = parse_packages(report.packages)
        progress_info_s.send(sender=None,
                             ptext='{0!s} packages'.format(str(host)[0:25]),
                             plen=len(packages))
        for i, pkg_str in enumerate(packages):
            package = process_package(pkg_str, report.protocol)
            if package:
                package_ids.append(package.id)
                try:
                    with transaction.atomic():
                        host.packages.add(package)
                except IntegrityError as e:
                    error_message.send(sender=None, text=e)
                except DatabaseError as e:
                    error_message.send(sender=None, text=e)
            else:
                # gpg-pubkey pseudo packages are skipped on purpose by
                # process_package; only report other failed packages
                if pkg_str[0].lower() != 'gpg-pubkey':
                    text = 'No package returned for {0!s}'.format(pkg_str)
                    info_message.send(sender=None, text=text)
            progress_update_s.send(sender=None, index=i + 1)
        # remove host packages that were not present in this report
        for package in host.packages.all():
            if package.id not in package_ids:
                host.packages.remove(package)
def find_kernel_updates(self, kernel_packages, repo_packages):
    """ Find updates for the host's kernel packages.

    For each installed kernel package, work out the highest version in
    the repos and the highest version already on the host; record an
    update when the repos are ahead. Returns the created update ids.
    """
    created_ids = []
    for kernel in kernel_packages:
        same_name = Q(name=kernel.name)
        # highest candidate available from the repos
        best_in_repo = kernel
        for candidate in repo_packages.filter(same_name):
            if (kernel.compare_version(candidate) == -1
                    and best_in_repo.compare_version(candidate) == -1):
                best_in_repo = candidate
        # highest version already installed on this host
        best_on_host = kernel
        for installed in self.packages.filter(same_name):
            if (kernel.compare_version(installed) == -1
                    and best_on_host.compare_version(installed) == -1):
                best_on_host = installed
        if best_on_host.compare_version(best_in_repo) == -1:
            update_id = self.process_update(best_on_host, best_in_repo)
            if update_id is not None:
                created_ids.append(update_id)
        self.check_if_reboot_required(best_on_host)
    try:
        with transaction.atomic():
            self.save()
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
    return created_ids
def download_url(res, text=''):
    """ Display a progress bar to download the request content if
    verbose is True. Otherwise, just return the request content

    res is a streaming response object; text is the progress bar label.
    Returns the (possibly truncated) body as bytes.
    """
    global verbose
    if verbose and 'content-length' in res.headers:
        clen = int(res.headers['content-length'])
        create_pbar(text, clen)
        chunk_size = 16384
        i = 0
        data = b''
        while i < clen:
            chunk = res.raw.read(chunk_size)
            if not chunk:
                # the connection closed before Content-Length bytes
                # arrived; without this break the loop spun forever
                break
            i += len(chunk)
            if i <= clen:
                update_pbar(i)
            data += chunk
        if i != clen:
            # finish the bar, then report the short/long read
            update_pbar(clen)
            text = 'Data length != Content-Length '
            text += '({0!s} != {1!s})'.format(i, clen)
            error_message.send(sender=None, text=text)
        return data
    else:
        return res.content
def update_yum_repo(mirror, data, repo_url):
    """ Update package metadata from yum-style rpm repo
    Returns a list of packages on success, or None if there
    are no packages or access fails
    """
    primary_url, checksum, checksum_type = get_primary_url(repo_url, data)
    if not primary_url:
        mirror.fail()
        return
    res = get_url(primary_url)
    mirror.last_access_ok = check_response(res)
    if mirror.last_access_ok:
        data = download_url(res, 'Downloading repo info (2/2):')
        if checksum_type == 'sha':
            sha = get_sha1(data)
        elif checksum_type == 'sha256':
            sha = get_sha256(data)
        else:
            # previously this fell through with `sha` unbound and raised
            # UnboundLocalError below; give up on this mirror instead
            error_message.send(sender=None,
                               text='Unknown checksum type: %s\n'
                                    % checksum_type)
            mirror.last_access_ok = False
            return
        if sha != checksum:
            error_message.send(
                sender=None,
                text='%s checksum failed for mirror %s, not updating package metadata\n'
                     % (checksum_type, mirror.id))
            mirror.last_access_ok = False
        elif mirror.file_checksum == sha:
            info_message.send(
                sender=None,
                text='Mirror checksum has not changed, not updating package metadata\n')
        else:
            mirror.file_checksum = sha
            return extract_yum_packages(data)
    else:
        mirror.fail()
        return
def process_repos(report, host):
    """ Processes the quoted repos string sent with a report

    Creates or updates a HostRepo for every repo in the report, then
    deletes host repos that no longer appear in the report.
    """
    if report.repos:
        repo_ids = []
        host_repos = HostRepo.objects.filter(host=host)
        repos = parse_repos(report.repos)
        progress_info_s.send(sender=None,
                             ptext='{0!s} repos'.format(str(host)[0:25]),
                             plen=len(repos))
        for i, repo_str in enumerate(repos):
            repo, priority = process_repo(repo_str, report.arch)
            if repo:
                repo_ids.append(repo.id)
                try:
                    with transaction.atomic():
                        hostrepo, c = host_repos.get_or_create(host=host,
                                                               repo=repo)
                except IntegrityError as e:
                    # row was created concurrently - fetch the existing one
                    error_message.send(sender=None, text=e)
                    hostrepo = host_repos.get(host=host, repo=repo)
                try:
                    # keep the stored priority in sync with the report
                    if hostrepo.priority != priority:
                        hostrepo.priority = priority
                        with transaction.atomic():
                            hostrepo.save()
                except IntegrityError as e:
                    error_message.send(sender=None, text=e)
            progress_update_s.send(sender=None, index=i + 1)
        # remove host repos that were not present in this report
        for hostrepo in host_repos:
            if hostrepo.repo.id not in repo_ids:
                hostrepo.delete()
def gunzip(contents):
    """ gunzip contents in memory and return the data

    Returns None and reports an error if decompression fails.
    """
    try:
        # MAX_WBITS | 32 makes zlib auto-detect gzip or zlib headers
        wbits = zlib.MAX_WBITS | 32
        return zlib.decompress(contents, wbits)
    except zlib.error as e:
        # `e` is an exception object; 'gunzip: ' + e raised TypeError
        # and masked the real error - convert it to str first
        error_message.send(sender=None, text='gunzip: ' + str(e))
def unxz(contents):
    """ unxz contents in memory and return the data

    Returns None and reports an error if decompression fails.
    """
    try:
        xzdata = lzma.decompress(contents)
        return xzdata
    except lzma.LZMAError as e:
        # 'lzma: ' + e raised TypeError (str + exception); convert first
        error_message.send(sender=None, text='lzma: ' + str(e))
def gunzip(contents):
    """ Decompress gzip- or zlib-wrapped data in memory and return it.

    Returns None and reports an error when the data is not valid.
    """
    # adding 32 to MAX_WBITS enables automatic gzip/zlib header detection
    auto_detect_wbits = zlib.MAX_WBITS | 32
    try:
        return zlib.decompress(contents, auto_detect_wbits)
    except zlib.error as err:
        error_message.send(sender=None, text='gunzip: ' + str(err))
def process(self, find_updates=True):
    """ Process a report and extract os, arch, domain, packages, repos etc
    """
    if self.os and self.kernel and self.arch:
        # note: `os` here shadows any module named os for this scope
        os, c = OS.objects.get_or_create(name=self.os)
        arch, c = MachineArchitecture.objects.get_or_create(name=self.arch)
        if not self.domain:
            self.domain = 'unknown'
        domain, c = Domain.objects.get_or_create(name=self.domain)
        if not self.host:
            try:
                self.host = str(gethostbyaddr(self.report_ip)[0])
            except (gaierror, herror):
                # reverse lookup failed - fall back to the IP address.
                # (was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit)
                self.host = self.report_ip
        host, c = Host.objects.get_or_create(
            hostname=self.host,
            defaults={
                'ipaddress': self.report_ip,
                'arch': arch,
                'os': os,
                'domain': domain,
                'lastreport': self.created,
            })
        host.ipaddress = self.report_ip
        host.kernel = self.kernel
        host.arch = arch
        host.os = os
        host.domain = domain
        host.lastreport = self.created
        host.tags = self.tags
        from patchman.reports.utils import process_packages, \
            process_repos, process_updates
        # only clear repos if we have a new list
        # apt and yum plugins don't send repos
        if self.repos:
            host.repos.clear()
            process_repos(report=self, host=host)
        host.packages.clear()
        process_packages(report=self, host=host)
        process_updates(report=self, host=host)
        if self.reboot == 'True':
            host.reboot_required = True
        else:
            host.reboot_required = False
        host.check_rdns()
        host.save()
        self.processed = True
        self.save()
        if find_updates:
            host.find_updates()
    else:
        error_message.send(sender=None,
                           text='Error: OS, kernel or arch not sent with report %s\n'
                                % (self.id))
def fail(self):
    """ Record a failed refresh attempt for this mirror.

    After more than 28 failures the mirror's refresh flag is
    switched off so it is no longer retried.
    """
    error_message.send(sender=None,
                       text='No usable mirror found at %s\n' % self.url)
    self.fail_count += 1
    if self.fail_count > 28:
        self.refresh = False
        error_message.send(
            sender=None,
            text='Mirror has failed more than 28 times, disabling refresh\n')
def get_sha(checksum_type, data):
    """ Returns the checksum of the data. Returns None otherwise. """
    sha = None  # stays None for unknown checksum types
    if checksum_type == 'sha' or checksum_type == 'sha1':
        sha = get_sha1(data)
    elif checksum_type == 'sha256':
        sha = get_sha256(data)
    else:
        # without the initialisation above, the unknown-type branch
        # raised UnboundLocalError at the return statement
        text = 'Unknown checksum type: {0!s}'.format(checksum_type)
        error_message.send(sender=None, text=text)
    return sha
def fail(self):
    """ Record that the mirror has failed.

    Refresh is permanently disabled once a mirror has failed
    more than 28 times.
    """
    error_message.send(
        sender=None,
        text='No usable mirror found at {0!s}'.format(self.url))
    self.fail_count += 1
    if self.fail_count > 28:
        self.refresh = False
        error_message.send(
            sender=None,
            text='Mirror has failed more than 28 times, disabling refresh')
def bunzip2(contents):
    """ bunzip2 contents in memory and return the data

    Returns None and reports an error if decompression fails.
    """
    try:
        return bz2.decompress(contents)
    except (IOError, ValueError) as e:
        # the old code compared the exception OBJECT to a string
        # (always False), so every failure was silently swallowed and
        # None returned with no message; report all errors instead
        error_message.send(sender=None, text='bunzip2: ' + str(e))
def get_checksum(data, checksum_type):
    """ Returns the checksum of the data. Returns None otherwise. """
    checksum = None  # stays None for unknown checksum types
    if checksum_type == Checksum.sha or checksum_type == Checksum.sha1:
        checksum = get_sha1(data)
    elif checksum_type == Checksum.sha256:
        checksum = get_sha256(data)
    elif checksum_type == Checksum.md5:
        checksum = get_md5(data)
    else:
        # without the initialisation above, the unknown-type branch
        # raised UnboundLocalError at the return statement
        text = 'Unknown checksum type: {0!s}'.format(checksum_type)
        error_message.send(sender=None, text=text)
    return checksum
def gunzip(contents):
    """ gunzip a file-like object in memory and return the data

    contents must be a seekable file-like object (e.g. BytesIO).
    Returns None and reports an error if the data is not gzipped.
    """
    try:
        gzipdata = gzip.GzipFile(fileobj=contents)
        return gzipdata.read()
    except IOError as e:
        # e.message does not exist in python 3 (it raised AttributeError
        # inside this handler); use str(e) and report every failure
        error_message.send(sender=None, text='gunzip: ' + str(e))
def update_errata(force=False):
    """ Update CentOS errata from https://cefs.steve-meier.de/
    and mark packages that are security updates
    """
    # fetch the published checksum first, then the errata archive itself
    checksum_data = download_errata_checksum()
    expected_checksum = parse_errata_checksum(checksum_data)
    errata_data = download_errata()
    if get_sha1(errata_data) != expected_checksum:
        error_message.send(
            sender=None,
            text='CEFS checksum did not match, skipping errata parsing')
    elif errata_data:
        parse_errata(bunzip2(errata_data), force)
def update_rdns(host):
    """ Look up and store the reverse DNS name for a host. """
    try:
        name = str(gethostbyaddr(host.ipaddress)[0])
    except (gaierror, herror):
        # no PTR record - the literal string 'None' is stored
        name = 'None'
    host.reversedns = name.lower()
    try:
        host.save()
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
def checksum_is_valid(sha, checksum, mirror):
    """ Compare a computed checksum against the expected one.

    Returns True when they match. On a mismatch the failure is
    reported, the mirror's last access is marked as failed and
    False is returned.
    """
    if sha == checksum:
        return True
    msg = 'Checksum failed for mirror {0!s}'.format(mirror.id)
    msg += ', not refreshing package metadata'
    error_message.send(sender=None, text=msg)
    detail = 'Found sha = {0!s}\nExpected = {1!s}'.format(sha, checksum)
    error_message.send(sender=None, text=detail)
    mirror.last_access_ok = False
    return False
def mirror_checksum_is_valid(computed, provided, mirror):
    """ Compare the computed checksum against the provided one.

    Returns True when both are present and equal. Otherwise the
    mismatch is reported, the mirror is marked as failed and False
    is returned.
    """
    if computed and computed == provided:
        return True
    text = 'Checksum failed for mirror {0!s}'.format(mirror.id)
    text += ', not refreshing package metadata'
    error_message.send(sender=None, text=text)
    text = 'Found checksum: {0!s}\nExpected checksum: {1!s}'.format(
        computed, provided)
    error_message.send(sender=None, text=text)
    mirror.last_access_ok = False
    mirror.fail()
    return False
def process_update(self, package, highest_package):
    """ Record a PackageUpdate from package to highest_package and
    attach it to this host. Returns the update id, or None when a
    database error prevents recording it.
    """
    # restrict mirrors to the repos this host can actually use
    if self.host_repos_only:
        host_repos = Q(repo__host=self)
    else:
        host_repos = \
            Q(repo__osgroup__os__host=self, repo__arch=self.arch) | \
            Q(repo__host=self)
    mirrors = highest_package.mirror_set.filter(host_repos)
    security = False
    # If any of the containing repos are security,
    # mark the update as security
    for mirror in mirrors:
        if mirror.repo.security:
            security = True
    updates = PackageUpdate.objects.all()
    # see if any version of this update exists
    # if it's already marked as a security update, leave it that way
    # if not, mark it as a security update
    # this could be an issue if different distros mark the same update
    # in different ways (security vs bugfix) but in reality this is not
    # very likely to happen. if it does, we err on the side of caution
    # and mark it as the security update
    try:
        update = updates.get(oldpackage=package,
                             newpackage=highest_package)
    except PackageUpdate.DoesNotExist:
        update = None
    try:
        if update:
            if security and not update.security:
                update.security = True
                with transaction.atomic():
                    update.save()
        else:
            with transaction.atomic():
                update, c = updates.get_or_create(
                    oldpackage=package,
                    newpackage=highest_package,
                    security=security)
    except IntegrityError as e:
        # created concurrently - fetch the existing row
        error_message.send(sender=None, text=e)
        update = updates.get(oldpackage=package,
                             newpackage=highest_package,
                             security=security)
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
    try:
        with transaction.atomic():
            self.updates.add(update)
        info_message.send(sender=None, text='{0!s}'.format(update))
        return update.id
    except IntegrityError as e:
        error_message.send(sender=None, text=e)
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
def update(self, force=False):
    """ Update all of a repos mirror metadata,
    force can be set to force a reset of all the mirrors metadata
    """
    if force:
        # wipe stored checksums so every mirror refreshes from scratch
        for mirror in self.mirror_set.all():
            mirror.file_checksum = None
            mirror.save()
    if self.auth_required:
        info_message.send(
            sender=None,
            text='Repo requires certificate authentication, not updating\n')
    elif self.repotype == Repository.DEB:
        update_deb_repo(self)
    elif self.repotype == Repository.RPM:
        update_rpm_repo(self)
    else:
        error_message.send(
            sender=None,
            text='Error: unknown repo type for repo %s: %s\n' % (
                self.id, self.repotype))
def get_url(url):
    """ Perform an HTTP GET on a URL.

    Returns the open response object on success, the HTTP error code
    when one is available, or -1 on any other failure (including
    responses that look like an HTML error page).
    """
    try:
        req = Request(url)
        res = urlopen(req)
        # don't blindly succeed with http 200 (e.g. sourceforge)
        headers = dict(res.headers.items())
        if 'content-type' in headers and \
                not re.match('text/html', headers['content-type']):
            return res
        else:
            return -1
    except IOError as e:
        # was python-2-only `except IOError, e` syntax
        if hasattr(e, 'reason'):
            debug_message.send(sender=None,
                               text='%s - %s\n' % (url, e.reason))
            return -1
        elif hasattr(e, 'code'):
            debug_message.send(sender=None, text='%s - %s\n' % (url, e))
            return e.code
        else:
            # fixed the '%e' typo, which was not a valid placeholder
            text = 'Unknown error: %s - %s\n' % (url, e)
            error_message.send(sender=None, text=text)
            return -1
def refresh(self, force=False):
    """ Refresh all of a repos mirror metadata,
    force can be set to force a reset of all the mirrors metadata
    """
    if force:
        # clear stored checksums so each mirror is fully re-fetched
        for mirror in self.mirror_set.all():
            mirror.file_checksum = None
            mirror.save()
    if self.auth_required:
        warning_message.send(
            sender=None,
            text='Repo requires certificate authentication, not updating')
    elif self.repotype == Repository.DEB:
        refresh_deb_repo(self)
    elif self.repotype == Repository.RPM:
        refresh_rpm_repo(self)
    else:
        error_message.send(
            sender=None,
            text='Error: unknown repo type for repo '
                 + '{0!s}: {1!s}'.format(self.id, self.repotype))
def get_or_create_package(name, epoch, version, release, arch, p_type):
    """ Get or create a Package object. Returns the object. Returns None
    if the package is the pseudo package gpg-pubkey, or if it cannot
    create it
    """
    package = None
    name = name.lower()
    if name == 'gpg-pubkey':
        return
    # epoch 0 and missing epoch are stored identically
    if epoch in [None, 0, '0']:
        epoch = ''
    # querysets are created outside the try blocks so the except
    # handlers below can always reference them safely
    package_names = PackageName.objects.all()
    try:
        with transaction.atomic():
            p_name, c = package_names.get_or_create(name=name)
    except IntegrityError as e:
        # created concurrently - fetch the existing name
        error_message.send(sender=None, text=e)
        p_name = package_names.get(name=name)
    except DatabaseError as e:
        # previously execution continued with p_name unbound and later
        # crashed with UnboundLocalError; give up and return None instead
        error_message.send(sender=None, text=e)
        return
    package_arches = PackageArchitecture.objects.all()
    with transaction.atomic():
        p_arch, c = package_arches.get_or_create(name=arch)
    packages = Package.objects.all()
    try:
        with transaction.atomic():
            package, c = packages.get_or_create(name=p_name,
                                                arch=p_arch,
                                                epoch=epoch,
                                                version=version,
                                                release=release,
                                                packagetype=p_type)
    except IntegrityError as e:
        error_message.send(sender=None, text=e)
        package = packages.get(name=p_name,
                               arch=p_arch,
                               epoch=epoch,
                               version=version,
                               release=release,
                               packagetype=p_type)
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
    return package
def get_url(url):
    """ Perform a http GET on a URL. Return None on error. """
    response = None
    try:
        # stream=True defers downloading the body until it is read
        response = requests.get(url, stream=True)
    except requests.exceptions.Timeout:
        error_message.send(sender=None,
                           text='Timeout - {0!s}'.format(url))
    except requests.exceptions.TooManyRedirects:
        error_message.send(sender=None,
                           text='Too many redirects - {0!s}'.format(url))
    except requests.exceptions.RequestException as exc:
        # catch-all must come last: Timeout and TooManyRedirects are
        # subclasses of RequestException
        error_message.send(sender=None,
                           text='Error ({0!s}) - {1!s}'.format(exc, url))
    return response
def process_update(self, package, highest_package):
    """ Create or fetch the PackageUpdate from package to
    highest_package and attach it to this host. Returns the update id,
    or None when a database error prevents recording it.
    """
    # restrict mirrors to the repos this host can actually use
    if self.host_repos_only:
        host_repos = Q(repo__host=self)
    else:
        host_repos = \
            Q(repo__osgroup__os__host=self, repo__arch=self.arch) | \
            Q(repo__host=self)
    mirrors = highest_package.mirror_set.filter(host_repos)
    security = False
    # If any of the containing repos are security,
    # mark the update as security
    for mirror in mirrors:
        if mirror.repo.security:
            security = True
    try:
        updates = PackageUpdate.objects.all()
        with transaction.atomic():
            update, c = updates.get_or_create(
                oldpackage=package,
                newpackage=highest_package,
                security=security)
    except IntegrityError as e:
        # created concurrently - fetch the existing row
        error_message.send(sender=None, text=e)
        update = updates.get(oldpackage=package,
                             newpackage=highest_package,
                             security=security)
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
    try:
        with transaction.atomic():
            self.updates.add(update)
        info_message.send(sender=None, text='{0!s}'.format(update))
        return update.id
    except IntegrityError as e:
        error_message.send(sender=None, text=e)
    except DatabaseError as e:
        error_message.send(sender=None, text=e)
def process(self, find_updates=True, verbose=False):
    """ Process a report and extract os, arch, domain, packages, repos etc

    Older python-2 variant of Report.process (uses print statements).
    """
    if self.os and self.kernel and self.arch and not self.processed:
        oses = OS.objects.all()
        with transaction.atomic():
            os, c = oses.get_or_create(name=self.os)
        machine_arches = MachineArchitecture.objects.all()
        with transaction.atomic():
            arch, c = machine_arches.get_or_create(name=self.arch)
        if not self.domain:
            self.domain = 'unknown'
        domains = Domain.objects.all()
        with transaction.atomic():
            domain, c = domains.get_or_create(name=self.domain)
        if not self.host:
            try:
                self.host = str(gethostbyaddr(self.report_ip)[0])
            # NOTE(review): bare except also swallows SystemExit etc;
            # should probably be `except (gaierror, herror)`
            except:
                self.host = self.report_ip
        hosts = Host.objects.all()
        with transaction.atomic():
            host, c = hosts.get_or_create(
                hostname=self.host,
                defaults={
                    'ipaddress': self.report_ip,
                    'arch': arch,
                    'os': os,
                    'domain': domain,
                    'lastreport': self.created,
                })
        host.ipaddress = self.report_ip
        host.kernel = self.kernel
        host.arch = arch
        host.os = os
        host.domain = domain
        host.lastreport = self.created
        host.tags = self.tags
        if self.reboot == 'True':
            host.reboot_required = True
        else:
            host.reboot_required = False
        try:
            with transaction.atomic():
                host.save()
        except IntegrityError as e:
            # NOTE(review): python-2 print statement
            print e
        except DatabaseError as e:
            print e
        host.check_rdns()
        if verbose:
            print 'Processing report %s - %s' % (self.id, self.host)
        from patchman.reports.utils import process_packages, \
            process_repos, process_updates
        with transaction.atomic():
            process_repos(report=self, host=host)
        with transaction.atomic():
            process_packages(report=self, host=host)
        with transaction.atomic():
            process_updates(report=self, host=host)
        self.processed = True
        with transaction.atomic():
            self.save()
        if find_updates:
            if verbose:
                print 'Finding updates for report %s - %s' % \
                    (self.id, self.host)
            host.find_updates()
    else:
        if self.processed:
            text = 'Report %s has already been processed\n' % (self.id)
            info_message.send(sender=None, text=text)
        else:
            text = 'Error: OS, kernel or arch not sent with report %s\n' \
                % (self.id)
            error_message.send(sender=None, text=text)
else: return -1 except IOError, e: if hasattr(e, 'reason'): debug_message.send(sender=None, text='%s - %s\n' % (url, e.reason)) return -1 elif hasattr(e, 'code'): debug_message.send(sender=None, text='%s - %s\n' % (url, e)) return e.code else: text = 'Unknown error: %s - %s\n' % (url, e) error_message.send(sender=None, text=text) return -1 except httplib.BadStatusLine, e: text = 'http bad status line: %s - %s\n' % (url, e.line) error_message.send(sender=None, text=text) return -1 def find_mirror_url(stored_mirror_url, formats): """ Find the actual URL of the mirror by trying predefined paths """ yast = False for fmt in formats: mirror_url = stored_mirror_url for f in formats: if mirror_url.endswith(f): mirror_url = mirror_url[:-len(f)] mirror_url = mirror_url.rstrip('/') + '/' + fmt
def process_package(pkg, protocol):
    """ Processes a single sanitized package string and converts to a
    package object

    Returns None for gpg-pubkey pseudo packages, for unknown report
    protocols, or when a database error prevents creation.
    """
    if protocol == '1':
        # ignore gpg-pubkey pseudo packages
        name = pkg[0].lower()
        if name == 'gpg-pubkey':
            return
        try:
            with transaction.atomic():
                package_names = PackageName.objects.all()
                p_name, c = package_names.get_or_create(name=name)
        except IntegrityError as e:
            # created concurrently - fetch the existing name
            error_message.send(sender=None, text=e)
            p_name = package_names.get(name=name)
        except DatabaseError as e:
            error_message.send(sender=None, text=e)
        if pkg[4] != '':
            arch = pkg[4]
        else:
            arch = 'unknown'
        package_arches = PackageArchitecture.objects.all()
        with transaction.atomic():
            p_arch, c = package_arches.get_or_create(name=arch)
        p_epoch = p_version = p_release = ''
        if pkg[1]:
            p_epoch = pkg[1]
            # NOTE(review): this inner check is redundant - p_epoch is
            # assigned the same value either way
            if pkg[1] != '0':
                p_epoch = pkg[1]
        if pkg[2]:
            p_version = pkg[2]
        if pkg[3]:
            p_release = pkg[3]
        p_type = Package.UNKNOWN
        if pkg[5] == 'deb':
            p_type = Package.DEB
        if pkg[5] == 'rpm':
            p_type = Package.RPM
        try:
            with transaction.atomic():
                packages = Package.objects.all()
                package, c = packages.get_or_create(name=p_name,
                                                    arch=p_arch,
                                                    epoch=p_epoch,
                                                    version=p_version,
                                                    release=p_release,
                                                    packagetype=p_type)
            return package
        except IntegrityError as e:
            error_message.send(sender=None, text=e)
            package = packages.get(name=p_name,
                                   arch=p_arch,
                                   epoch=p_epoch,
                                   version=p_version,
                                   release=p_release,
                                   packagetype=p_type)
            return package
        except DatabaseError as e:
            error_message.send(sender=None, text=e)
def process_repo(repo, arch):
    """ Processes a single sanitized repo string and converts to a repo
    object

    Returns the Repository object and its priority for this host.
    """
    repository = r_id = None
    if repo[2] == '':
        r_priority = 0
    if repo[0] == 'deb':
        r_type = Repository.DEB
        r_priority = int(repo[2])
    elif repo[0] == 'rpm':
        r_type = Repository.RPM
        r_id = repo.pop(2)
        # rpm priorities are inverted relative to deb ones
        r_priority = int(repo[2]) * -1
    if repo[1]:
        r_name = repo[1]
    machine_arches = MachineArchitecture.objects.all()
    with transaction.atomic():
        r_arch, c = machine_arches.get_or_create(name=arch)
    unknown = []
    # try to identify the repository by one of its mirror urls
    for r_url in repo[3:]:
        try:
            mirror = Mirror.objects.get(url=r_url)
        except Mirror.DoesNotExist:
            if repository:
                Mirror.objects.create(repo=repository, url=r_url)
            else:
                # defer creation until we know which repository owns it
                unknown.append(r_url)
        else:
            repository = mirror.repo
    if not repository:
        repositories = Repository.objects.all()
        try:
            with transaction.atomic():
                repository, c = repositories.get_or_create(name=r_name,
                                                           arch=r_arch,
                                                           repotype=r_type)
        except IntegrityError as e:
            # created concurrently - fetch the existing repository
            error_message.send(sender=None, text=e)
            repository = repositories.get(name=r_name,
                                          arch=r_arch,
                                          repotype=r_type)
        except DatabaseError as e:
            error_message.send(sender=None, text=e)
    if r_id and repository.repo_id != r_id:
        repository.repo_id = r_id
        with transaction.atomic():
            repository.save()
    # now the repository is known, attach the deferred mirrors
    for url in unknown:
        Mirror.objects.create(repo=repository, url=url)
    for mirror in Mirror.objects.filter(repo=repository).values('url'):
        # vendor mirrors that require client certificates
        if mirror['url'].find('cdn.redhat.com') != -1 or \
                mirror['url'].find('nu.novell.com') != -1:
            repository.auth_required = True
            with transaction.atomic():
                repository.save()
        if mirror['url'].startswith('http://security') or \
                mirror['url'].startswith('https://security'):
            repository.security = True
            with transaction.atomic():
                repository.save()
    return repository, r_priority
def process_repo(repo, arch):
    """ Processes a single sanitized repo string and converts to a repo
    object

    Returns the Repository object and its priority for this host.
    """
    repository = r_id = None
    if repo[0] == 'deb':
        r_type = Repository.DEB
        r_priority = int(repo[2])
    elif repo[0] == 'rpm':
        r_type = Repository.RPM
        r_id = repo.pop(2)
        # rpm priorities are inverted relative to deb ones
        r_priority = int(repo[2]) * -1
    elif repo[0] == 'arch':
        r_type = Repository.ARCH
        r_id = repo[2]
        r_priority = 0
    if repo[1]:
        r_name = repo[1]
    machine_arches = MachineArchitecture.objects.all()
    with transaction.atomic():
        r_arch, c = machine_arches.get_or_create(name=arch)
    unknown = []
    # try to identify the repository by one of its mirror urls
    for r_url in repo[3:]:
        try:
            mirror = Mirror.objects.get(url=r_url)
        except Mirror.DoesNotExist:
            if repository:
                Mirror.objects.create(repo=repository, url=r_url)
            else:
                # defer creation until we know which repository owns it
                unknown.append(r_url)
        else:
            repository = mirror.repo
    if not repository:
        repositories = Repository.objects.all()
        try:
            with transaction.atomic():
                repository, c = repositories.get_or_create(name=r_name,
                                                           arch=r_arch,
                                                           repotype=r_type)
        except IntegrityError as e:
            # created concurrently - fetch the existing repository
            error_message.send(sender=None, text=e)
            repository = repositories.get(name=r_name,
                                          arch=r_arch,
                                          repotype=r_type)
        except DatabaseError as e:
            error_message.send(sender=None, text=e)
    if r_id and repository.repo_id != r_id:
        repository.repo_id = r_id
        with transaction.atomic():
            repository.save()
    # now the repository is known, attach the deferred mirrors
    for url in unknown:
        Mirror.objects.create(repo=repository, url=url)
    for mirror in Mirror.objects.filter(repo=repository).values('url'):
        # vendor mirrors that require client certificates
        if mirror['url'].find('cdn.redhat.com') != -1 or \
                mirror['url'].find('nu.novell.com') != -1 or \
                mirror['url'].find('updates.suse.com') != -1:
            repository.auth_required = True
            with transaction.atomic():
                repository.save()
        if mirror['url'].find('security') != -1:
            repository.security = True
            with transaction.atomic():
                repository.save()
    return repository, r_priority
def process(self, find_updates=True, verbose=False):
    """ Process a report and extract os, arch, domain, packages, repos etc

    Reports are only processed once; find_updates additionally runs
    the host's update check afterwards.
    """
    if self.os and self.kernel and self.arch and not self.processed:
        oses = OS.objects.all()
        with transaction.atomic():
            os, c = oses.get_or_create(name=self.os)
        machine_arches = MachineArchitecture.objects.all()
        with transaction.atomic():
            arch, c = machine_arches.get_or_create(name=self.arch)
        if not self.domain:
            self.domain = 'unknown'
        domains = Domain.objects.all()
        with transaction.atomic():
            domain, c = domains.get_or_create(name=self.domain)
        if not self.host:
            try:
                self.host = str(gethostbyaddr(self.report_ip)[0])
            except herror:
                # no reverse DNS - fall back to the IP address
                self.host = self.report_ip
        hosts = Host.objects.all()
        with transaction.atomic():
            host, c = hosts.get_or_create(hostname=self.host,
                                          defaults={
                                              'ipaddress': self.report_ip,
                                              'arch': arch,
                                              'os': os,
                                              'domain': domain,
                                              'lastreport': self.created,
                                          })
        # refresh host details from this report
        host.ipaddress = self.report_ip
        host.kernel = self.kernel
        host.arch = arch
        host.os = os
        host.domain = domain
        host.lastreport = self.created
        host.tags = self.tags
        if self.reboot == 'True':
            host.reboot_required = True
        else:
            host.reboot_required = False
        try:
            with transaction.atomic():
                host.save()
        except IntegrityError as e:
            error_message.send(sender=None, text=e)
        except DatabaseError as e:
            error_message.send(sender=None, text=e)
        host.check_rdns()
        if verbose:
            text = 'Processing report '
            text += '{0!s} - {1!s}'.format(self.id, self.host)
            info_message.send(sender=None, text=text)
        from reports.utils import process_packages, \
            process_repos, process_updates
        with transaction.atomic():
            process_repos(report=self, host=host)
        with transaction.atomic():
            process_packages(report=self, host=host)
        with transaction.atomic():
            process_updates(report=self, host=host)
        self.processed = True
        with transaction.atomic():
            self.save()
        if find_updates:
            if verbose:
                text = 'Finding updates for report '
                text += '{0!s} - {1!s}'.format(self.id, self.host)
                info_message.send(sender=None, text=text)
            host.find_updates()
    else:
        if self.processed:
            text = 'Report {0!s} '.format(self.id)
            text += 'has already been processed'
            info_message.send(sender=None, text=text)
        else:
            text = 'Error: OS, kernel or arch not sent '
            text += 'with report {0!s}'.format(self.id)
            error_message.send(sender=None, text=text)