def _get_erratas(self, api_version: int, pkg_id: int, modified_since: datetime.datetime, third_party: bool) -> tuple:
    """Collect advisories linked to a package.

    :param api_version: API version of the request; the 'updated' field and the
                        modified_since tracking are only produced for v3+
    :param pkg_id: internal package id to look up advisories for
    :param modified_since: cutoff passed through to self._update_modified_found
    :param third_party: when False, third-party advisories are skipped
    :returns: (naturally-sorted list of advisory dicts, bool flag saying whether
              any advisory was updated after modified_since)
    """
    result = []
    any_modified = False
    for errata_id in self.cache.pkgid2errataids.get(pkg_id, []):
        errata_name = self.cache.errataid2name[errata_id]
        errata_detail = self.cache.errata_detail[errata_name]
        # Hide third-party advisories unless the caller opted in.
        if errata_detail[ERRATA_THIRD_PARTY] and not third_party:
            continue
        record = {
            'name': errata_name,
            'issued': none2empty(format_datetime(errata_detail[ERRATA_ISSUED]))
        }
        if api_version >= 3:
            updated = errata_detail[ERRATA_UPDATED]
            record['updated'] = none2empty(format_datetime(updated))
            any_modified = self._update_modified_found(
                any_modified, modified_since, updated)
        cve_names = errata_detail[ERRATA_CVE]
        if cve_names:
            record['cve_list'] = natsorted(cve_names)
        result.append(record)
    return natsorted(result, key=lambda rec: rec['name']), any_modified
def _get_pkg_errata_updates(self, update_pkg_id: int, errata_id: int, module_ids: set, available_repo_ids: set, valid_releasevers: set, nevra: str, security_only: bool, third_party: bool) -> list:
    """Build per-repository update records for one (update package, advisory) pair.

    Returns an empty list when the advisory is filtered out (non-security when
    security_only is set, third-party when third_party is False, or a modular
    advisory none of whose module streams is enabled).
    """
    errata_name = self.db_cache.errataid2name[errata_id]
    errata_detail = self.db_cache.errata_detail[errata_name]
    # Security-only requests drop non-security advisories entirely.
    if filter_non_security(errata_detail, security_only):
        return []
    # Hide third-party advisories unless the caller opted in.
    if errata_detail[ERRATA_THIRD_PARTY] and not third_party:
        return []
    # A modular advisory applies only when one of its module streams is enabled.
    module_streams = self.db_cache.pkgerrata2module.get((update_pkg_id, errata_id))
    if module_streams is not None and not module_streams.intersection(module_ids):
        return []
    updates = []
    for repo_id in self._get_repositories(update_pkg_id, [errata_id],
                                          available_repo_ids, valid_releasevers):
        repo_detail = self.db_cache.repo_detail[repo_id]
        updates.append({
            'package': nevra,
            'erratum': errata_name,
            'repository': repo_detail[REPO_LABEL],
            'basearch': none2empty(repo_detail[REPO_BASEARCH]),
            'releasever': none2empty(repo_detail[REPO_RELEASEVER])
        })
    return updates
def _update_repositories(self, pkg_id: int, opts: dict) -> tuple:
    """Check whether all repos are 'third-party' to exclude package if third-party=True is set."""
    # Nothing to add when the caller did not request repository data.
    if not opts["return_repositories"]:
        return {}, False
    repositories, third_party_only = self._get_repositories(pkg_id)
    return {"repositories": none2empty(repositories)}, third_party_only
def _update_errata(self, api_version: int, pkg_id: int, opts: dict, third_party: bool) -> tuple:
    """Add errata-related data, skip based on modified_since if needed.

    :returns: (data dict to merge into the response, skip flag); the skip flag
              is True (with data=None) when the package has no advisory newer
              than the modified_since cutoff and should be excluded entirely.
    """
    want_errata = opts["return_errata"]
    since = opts["modified_since"]
    result = {}
    # Advisories are needed either for the response itself or to evaluate
    # the modified_since exclusion.
    if want_errata or since is not None:
        errata, modified = self._get_erratas(api_version, pkg_id, since, third_party)
        if self._exclude_not_modified(modified, since, len(errata)):
            return None, True
        if want_errata:
            result["errata"] = none2empty(errata)
            result["first_published"] = none2empty(
                self._get_first_published_from_erratas(errata))
    return result, False
def _get_repositories(self, pkg_id) -> tuple:
    """List repositories shipping the package, sorted by label, plus a flag
    saying whether every one of them is marked third-party."""
    # FIXME Add support for modules and streams.
    repo_dicts = []
    flags = []
    for repo_id in self.cache.pkgid2repoids.get(pkg_id, []):
        repo_detail = self.cache.repo_detail[repo_id]
        flags.append(repo_detail[REPO_THIRD_PARTY])
        repo_dicts.append({
            'label': repo_detail[REPO_LABEL],
            'name': repo_detail[REPO_NAME],
            'basearch': none2empty(repo_detail[REPO_BASEARCH]),
            'releasever': none2empty(repo_detail[REPO_RELEASEVER]),
            'revision': format_datetime(repo_detail[REPO_REVISION])
        })
    # True only when at least one repo was found and none carries a False flag.
    third_party_only = bool(flags) and False not in flags
    return natsorted(repo_dicts, key=lambda rep: rep['label']), third_party_only
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """
    This method returns details for given set of CVEs.

    :param data: data obtained from api, we're interested in data["cve_list"]

    :returns: list of dictionaries containing detailed information for given cve list}
    """
    requested = data.get("cve_list", None)
    modified_since = data.get("modified_since", None)
    published_since = data.get("published_since", None)
    rh_only = data.get('rh_only', False)
    errata_only = data.get('errata_associated', False)
    # Dates are parsed up front, before the empty-list shortcut, to keep
    # malformed-date handling independent of the requested list.
    modified_since_dt = parse_datetime(modified_since)
    published_since_dt = parse_datetime(published_since)
    page = data.get("page", None)
    page_size = data.get("page_size", None)

    if not requested:
        return {}

    # Drop empty entries, then expand regex patterns into concrete CVE names.
    requested = self.try_expand_by_regex([cve for cve in requested if cve])

    filters = [(filter_item_if_exists, [self.cache.cve_detail])]
    if rh_only:
        filters.append((self._filter_redhat_only, []))
    if errata_only:
        filters.append((self._filter_errata_only, []))
    # Date filters only apply when the client supplied the corresponding bound.
    if modified_since:
        filters.append((self._filter_modified_since, [modified_since_dt]))
    if published_since:
        filters.append((self._filter_published_since, [published_since_dt]))

    page_cves, pagination_response = paginate(requested, page, page_size, filters=filters)
    cve_list = {}
    for cve in page_cves:
        detail = self.cache.cve_detail.get(cve, None)
        if not detail:
            continue
        bin_pkg_list, src_pkg_list = pkgidlist2packages(self.cache, detail[CVE_PID])
        cve_list[cve] = {
            "redhat_url": none2empty(detail[CVE_REDHAT_URL]),
            "secondary_url": none2empty(detail[CVE_SECONDARY_URL]),
            "synopsis": cve,
            "impact": none2empty(detail[CVE_IMPACT]),
            "public_date": none2empty(format_datetime(detail[CVE_PUBLISHED_DATE])),
            "modified_date": none2empty(format_datetime(detail[CVE_MODIFIED_DATE])),
            "cwe_list": none2empty(detail[CVE_CWE]),
            "cvss3_score": str(none2empty(detail[CVE_CVSS3_SCORE])),
            "cvss3_metrics": str(none2empty(detail[CVE_CVSS3_METRICS])),
            "cvss2_score": str(none2empty(detail[CVE_CVSS2_SCORE])),
            "cvss2_metrics": str(none2empty(detail[CVE_CVSS2_METRICS])),
            "description": none2empty(detail[CVE_DESCRIPTION]),
            "package_list": bin_pkg_list,
            "source_package_list": src_pkg_list,
            "errata_list": [self.cache.errataid2name[eid] for eid in detail[CVE_EID]],
        }

    response = {"cve_list": cve_list}
    response.update(pagination_response)
    response['last_change'] = format_datetime(self.cache.dbchange['last_change'])
    return response
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """
    Returns repository details.
    :param data: json request parsed into data structure
    :returns: json response with repository details
    """
    repos = data.get('repository_list', None)
    # NOTE(review): called even when repos is None — presumably strip_prefixes
    # tolerates None; confirm against its implementation.
    strip_prefixes(repos, REPO_PREFIXES)
    modified_since = data.get('modified_since', None)
    modified_since_dt = parse_datetime(modified_since)
    page = data.get("page", None)
    page_size = data.get("page_size", None)
    # By default, don't include third party data
    want_third_party = data.get('third_party', False)
    repolist = {}
    if not repos:
        return repolist
    filters = []
    if modified_since:
        filters.append((self._filter_modified_since, [modified_since_dt]))
    filters.append((self._filter_third_party, [want_third_party]))
    repos = self.try_expand_by_regex(repos)
    repos = list(set(repos))
    repo_details = {}
    # One representative repo_detail per label (the last repo id wins when a
    # label maps to several repos) — used only by the existence filter below.
    for label in repos:
        for repo_id in self.cache.repolabel2ids.get(label, []):
            repo_details[label] = self.cache.repo_detail[repo_id]
    filters.append((filter_item_if_exists, [repo_details]))
    actual_page_size = 0
    repo_page_to_process, pagination_response = paginate(repos, page, page_size,
                                                         filters=filters)
    for label in repo_page_to_process:
        cs_id = self.cache.label2content_set_id[label]
        for repo_id in self.cache.repolabel2ids.get(label, []):
            repo_detail = self.cache.repo_detail[repo_id]
            # Per-repo recheck of modified_since: a label that survived the
            # paginate filter may still have individual repos older than the cutoff.
            if not modified_since_dt or self._modified_since(
                    repo_detail, modified_since_dt):
                # Repo-level CPEs take precedence; fall back to content-set CPEs.
                if repo_id in self.cache.repo_id2cpe_ids:
                    cpe_ids = self.cache.repo_id2cpe_ids[repo_id]
                else:
                    cpe_ids = self.cache.content_set_id2cpe_ids.get(
                        cs_id, [])
                repolist.setdefault(label, []).append({
                    "label": label,
                    "name": repo_detail[REPO_NAME],
                    "url": repo_detail[REPO_URL],
                    "basearch": none2empty(repo_detail[REPO_BASEARCH]),
                    "releasever": none2empty(repo_detail[REPO_RELEASEVER]),
                    "product": repo_detail[REPO_PRODUCT],
                    "revision": format_datetime(repo_detail[REPO_REVISION]),
                    "cpes": [
                        self.cache.cpe_id2label[cpe_id] for cpe_id in cpe_ids
                    ],
                    "third_party": repo_detail[REPO_THIRD_PARTY]
                })
        # NOTE(review): raises KeyError if no repo of this label passed the
        # per-repo recheck above — presumably unreachable because the paginate
        # filters already excluded such labels; confirm filter semantics.
        actual_page_size += len(repolist[label])
    response = {
        'repository_list': repolist,
        'last_change': format_datetime(self.cache.dbchange['last_change'])
    }
    # page_size in the response reflects repos actually emitted, not labels.
    pagination_response['page_size'] = actual_page_size
    response.update(pagination_response)
    return response
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """
    Returns package details.
    :param data: json request parsed into data structure
    :returns: json response with package details
    """
    packages = data.get('package_list', None)
    # By default, don't include third party data
    want_third_party = data.get('third_party', False)
    packagelist = {}
    response = {
        'last_change': utils.format_datetime(self.cache.dbchange['last_change'])
    }
    if not packages:
        response['package_list'] = packagelist
        return response
    for pkg in packages:
        # Every requested NEVRA gets an entry; unknown packages keep an empty dict.
        packagedata = packagelist.setdefault(pkg, {})
        is_third_party = False
        name, epoch, ver, rel, arch = parse_rpm_name(pkg, default_epoch='0')
        # All three id lookups must succeed before the package can be resolved.
        if name in self.cache.packagename2id \
                and (epoch, ver, rel) in self.cache.evr2id \
                and arch in self.cache.arch2id:
            name_id = self.cache.packagename2id[name]
            evr_id = self.cache.evr2id[(epoch, ver, rel)]
            arch_id = self.cache.arch2id[arch]
            pkg_id = self.cache.nevra2pkgid.get((name_id, evr_id, arch_id), None)
            if pkg_id:
                pkg_detail = self.cache.package_details[pkg_id]
                packagedata['summary'] = self.cache.strings.get(
                    pkg_detail[PKG_SUMMARY_ID], None)
                packagedata['description'] = self.cache.strings.get(
                    pkg_detail[PKG_DESC_ID], None)
                packagedata['source_package'] = self._get_source_package(
                    pkg_detail)
                packagedata['repositories'] = []
                packagedata[
                    'package_list'] = self._get_built_binary_packages(
                        pkg_id)
                if pkg_id in self.cache.pkgid2repoids:
                    for repo_id in self.cache.pkgid2repoids[pkg_id]:
                        repodetail = self.cache.repo_detail[repo_id]
                        # One third-party repo is enough to mark the whole package.
                        is_third_party = is_third_party or bool(
                            repodetail[REPO_THIRD_PARTY])
                        repodata = {
                            'label': repodetail[REPO_LABEL],
                            'name': repodetail[REPO_NAME],
                            'basearch':
                                utils.none2empty(repodetail[REPO_BASEARCH]),
                            'releasever':
                                utils.none2empty(repodetail[REPO_RELEASEVER]),
                        }
                        packagedata['repositories'].append(repodata)
        # If the package is third party, then remove it from result
        if not want_third_party and is_third_party:
            packagelist[pkg] = {}
    response['package_list'] = packagelist
    return response
def test_none2empty(self):
    """Test 'None' to "" conversion."""
    converted = utils.none2empty(None)
    assert converted == ""
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """
    This method returns details for given set of Errata.
    :param data: data obtained from api, we're interested in data["errata_list"]
    :returns: dictionary containing detailed information for given errata list}
    """
    modified_since = data.get("modified_since", None)
    modified_since_dt = parse_datetime(modified_since)
    third_party = data.get("third_party", False)
    errata_to_process = data.get("errata_list", None)
    page = data.get("page", None)
    page_size = data.get("page_size", None)
    errata_type = data.get("type", None)
    severity = data.get("severity", [])
    response = {"errata_list": {},
                "last_change": format_datetime(self.cache.dbchange["last_change"])}
    filters = [(filter_item_if_exists, [self.cache.errata_detail]),
               (self._filter_third_party, [third_party])]
    if modified_since:
        # if we have information about modified/published dates and receive "modified_since" in request,
        # compare the dates
        filters.append((self._filter_modified_since, [modified_since_dt]))
    if errata_type:
        # Normalize to a lowercase list whether a single string or a list was sent.
        errata_type = [t.lower() for t in set(errata_type)] \
            if isinstance(errata_type, list) else [errata_type.lower()]
        response["type"] = errata_type
        filters.append((self._filter_errata_by_prop, ["type", errata_type]))
    # Severity filter is active for an explicit null or a non-empty list;
    # the default [] (parameter absent) deliberately skips it.
    if severity is None or len(severity) != 0:
        severity = self._prepare_severity(severity)
        response["severity"] = severity
        filters.append((self._filter_errata_by_prop, ["severity", severity]))
    if not errata_to_process:
        return response
    errata_to_process = self.try_expand_by_regex(errata_to_process)
    errata_list = {}
    errata_page_to_process, pagination_response = paginate(errata_to_process, page,
                                                           page_size, filters=filters)
    for errata in errata_page_to_process:
        errata_detail = self.cache.errata_detail.get(errata, None)
        if not errata_detail:
            continue
        bin_pkg_list, src_pkg_list = pkgidlist2packages(self.cache,
                                                        errata_detail[ERRATA_PKGIDS])
        releasevers = self._errata_releasevers(errata_detail[ERRATA_ID])
        if errata_detail[ERRATA_MODULE]:
            for index, module_update in enumerate(errata_detail[ERRATA_MODULE]):
                # NOTE(review): this rewrites the cached module package id lists
                # into NEVRA strings IN PLACE, so the change persists in
                # self.cache across requests; the all-digits guard keeps the
                # conversion from running twice on already-converted entries.
                if all(str(elem).isdigit() for elem in errata_detail[ERRATA_MODULE][index]["package_list"]):
                    module_pkg_list, module_src_pkg_list = pkgidlist2packages(
                        self.cache, module_update["package_list"])
                    errata_detail[ERRATA_MODULE][index]["package_list"] = module_pkg_list
                    errata_detail[ERRATA_MODULE][index]["source_package_list"] = module_src_pkg_list
        errata_list[errata] = {
            "synopsis": none2empty(errata_detail[ERRATA_SYNOPSIS]),
            "summary": none2empty(errata_detail[ERRATA_SUMMARY]),
            "type": none2empty(errata_detail[ERRATA_TYPE]),
            "severity": errata_detail[ERRATA_SEVERITY],
            "description": none2empty(errata_detail[ERRATA_DESCRIPTION]),
            "solution": none2empty(errata_detail[ERRATA_SOLUTION]),
            "issued": none2empty(format_datetime(errata_detail[ERRATA_ISSUED])),
            "updated": none2empty(format_datetime(errata_detail[ERRATA_UPDATED])),
            "cve_list": errata_detail[ERRATA_CVE],
            "package_list": bin_pkg_list,
            "source_package_list": src_pkg_list,
            "bugzilla_list": errata_detail[ERRATA_BUGZILLA],
            "reference_list": errata_detail[ERRATA_REFERENCE],
            "modules_list": errata_detail[ERRATA_MODULE],
            "url": none2empty(errata_detail[ERRATA_URL]),
            "third_party": errata_detail[ERRATA_THIRD_PARTY],
            "requires_reboot": errata_detail[ERRATA_REQUIRES_REBOOT],
            "release_versions": releasevers,
        }
    response["errata_list"] = errata_list
    response.update(pagination_response)
    return response