def _get_built_binary_packages(self, pkg_id: int) -> list:
    """Return packages built from the given source package id.

    Looks up *pkg_id* in the cache's source->built-packages mapping and
    returns the binary and source package lists concatenated; an unknown
    id yields an empty list.
    """
    built_ids = self.cache.src_pkg_id2pkg_ids.get(pkg_id)
    if built_ids is None:
        return []
    binary_pkgs, source_pkgs = utils.pkgidlist2packages(self.cache, built_ids)
    return binary_pkgs + source_pkgs
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """Return details for a given set of errata.

    :param api_version: API version used by the caller (unused here)
    :param data: request payload; relevant keys are "errata_list",
                 "modified_since", "page" and "page_size"
    :returns: dict with detailed information for the requested errata,
              plus pagination metadata
    """
    validate(data, JSON_SCHEMA)

    modified_since = data.get("modified_since", None)
    modified_since_dt = parse_datetime(modified_since)
    requested = data.get("errata_list", None)
    page = data.get("page", None)
    page_size = data.get("page_size", None)

    response = {"errata_list": {}}
    if modified_since:
        response["modified_since"] = modified_since
    if not requested:
        return response

    # A single label is treated as a regex and expanded to every matching name.
    if len(requested) == 1:
        requested = self.find_errata_by_regex(requested[0])

    # Date filtering applies only when the caller sent "modified_since".
    filters = []
    if modified_since:
        filters.append((self._filter_modified_since, [modified_since_dt]))

    page_items, pagination_response = paginate(requested, page, page_size, filters=filters)

    details = {}
    for erratum in page_items:
        record = self.cache.errata_detail.get(erratum, None)
        if not record:
            continue
        details[erratum] = {
            "synopsis": none2empty(record[ERRATA_SYNOPSIS]),
            "summary": none2empty(record[ERRATA_SUMMARY]),
            "type": none2empty(record[ERRATA_TYPE]),
            "severity": none2empty(record[ERRATA_SEVERITY]),
            "description": none2empty(record[ERRATA_DESCRIPTION]),
            "solution": none2empty(record[ERRATA_SOLUTION]),
            "issued": none2empty(format_datetime(record[ERRATA_ISSUED])),
            "updated": none2empty(format_datetime(record[ERRATA_UPDATED])),
            "cve_list": record[ERRATA_CVE],
            "package_list": pkgidlist2packages(self.cache, record[ERRATA_PKGIDS]),
            "bugzilla_list": record[ERRATA_BUGZILLA],
            "reference_list": record[ERRATA_REFERENCE],
            "url": none2empty(record[ERRATA_URL]),
        }

    response["errata_list"] = details
    response.update(pagination_response)
    return response
def test_pkgidlist2packages(self, load_cache):
    """pkgidlist2packages should yield a valid NEVRA for each package id."""
    produced = utils.pkgidlist2packages(self.cache, [1, 2])
    assert all(self._is_nevra(item) for item in produced)
def process_list(self, data):
    """Return details for a given set of CVEs.

    :param data: request payload; relevant keys are "cve_list",
                 "modified_since", "page" and "page_size"
    :returns: dict with detailed information for the requested CVEs plus
              pagination metadata (empty dict when no CVEs were requested)
    """
    validate(data, JSON_SCHEMA)

    requested = data.get("cve_list", None)
    modified_since = data.get("modified_since", None)
    modified_since_dt = parse_datetime(modified_since)
    page = data.get("page", None)
    page_size = data.get("page_size", None)

    if not requested:
        return {}

    # Drop empty labels; a single remaining label is treated as a regex.
    requested = list(filter(None, requested))
    if len(requested) == 1:
        requested = self.find_cves_by_regex(requested[0])

    page_items, pagination_response = paginate(requested, page, page_size)

    details = {}
    for cve in page_items:
        record = self.cache.cve_detail.get(cve, None)
        if not record:
            continue
        # When "modified_since" was sent, skip records whose modification
        # date (falling back to the publish date when unset) predates it.
        if modified_since:
            reference_date = record[CVE_MODIFIED_DATE] or record[CVE_PUBLISHED_DATE]
            if reference_date and reference_date < modified_since_dt:
                continue
        details[cve] = {
            "redhat_url": none2empty(record[CVE_REDHAT_URL]),
            "secondary_url": none2empty(record[CVE_SECONDARY_URL]),
            "synopsis": cve,
            "impact": none2empty(record[CVE_IMPACT]),
            "public_date": none2empty(format_datetime(record[CVE_PUBLISHED_DATE])),
            "modified_date": none2empty(format_datetime(record[CVE_MODIFIED_DATE])),
            "cwe_list": none2empty(record[CVE_CWE]),
            "cvss3_score": str(none2empty(record[CVE_CVSS3_SCORE])),
            "description": none2empty(record[CVE_DESCRIPTION]),
            "package_list": pkgidlist2packages(self.cache, record[CVE_PID]),
            "errata_list": [self.cache.errataid2name[eid] for eid in record[CVE_EID]],
        }

    response = {"cve_list": details}
    response.update(pagination_response)
    if modified_since:
        response["modified_since"] = modified_since
    return response
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """Return details for a given set of CVEs.

    :param api_version: API version used by the caller (unused here)
    :param data: request payload; relevant keys are "cve_list",
                 "modified_since", "published_since", "rh_only",
                 "page" and "page_size"
    :returns: dict with detailed information for the requested CVEs plus
              pagination metadata (empty dict when no CVEs were requested)
    """
    validate(data, JSON_SCHEMA)

    requested = data.get("cve_list", None)
    modified_since = data.get("modified_since", None)
    published_since = data.get("published_since", None)
    rh_only = data.get("rh_only", False)
    modified_since_dt = parse_datetime(modified_since)
    published_since_dt = parse_datetime(published_since)
    page = data.get("page", None)
    page_size = data.get("page_size", None)

    if not requested:
        return {}

    # Drop empty labels; a single remaining label is treated as a regex.
    requested = list(filter(None, requested))
    if len(requested) == 1:
        requested = self.find_cves_by_regex(requested[0])

    # Filters run inside paginate() so page sizes reflect the filtered set.
    filters = [(filter_item_if_exists, [self.cache.cve_detail])]
    if rh_only:
        filters.append((self._filter_redhat_only, []))
    # Date filters apply only when the corresponding parameter was sent.
    if modified_since:
        filters.append((self._filter_modified_since, [modified_since_dt]))
    if published_since:
        filters.append((self._filter_published_since, [published_since_dt]))

    page_items, pagination_response = paginate(requested, page, page_size, filters=filters)

    details = {}
    for cve in page_items:
        record = self.cache.cve_detail.get(cve, None)
        if not record:
            continue
        binary_packages, source_packages = pkgidlist2packages(self.cache, record[CVE_PID])
        details[cve] = {
            "redhat_url": none2empty(record[CVE_REDHAT_URL]),
            "secondary_url": none2empty(record[CVE_SECONDARY_URL]),
            "synopsis": cve,
            "impact": none2empty(record[CVE_IMPACT]),
            "public_date": none2empty(format_datetime(record[CVE_PUBLISHED_DATE])),
            "modified_date": none2empty(format_datetime(record[CVE_MODIFIED_DATE])),
            "cwe_list": none2empty(record[CVE_CWE]),
            "cvss3_score": str(none2empty(record[CVE_CVSS3_SCORE])),
            "cvss3_metrics": str(none2empty(record[CVE_CVSS3_METRICS])),
            "cvss2_score": str(none2empty(record[CVE_CVSS2_SCORE])),
            "cvss2_metrics": str(none2empty(record[CVE_CVSS2_METRICS])),
            "description": none2empty(record[CVE_DESCRIPTION]),
            "package_list": binary_packages,
            "source_package_list": source_packages,
            "errata_list": [self.cache.errataid2name[eid] for eid in record[CVE_EID]],
        }

    response = {"cve_list": details}
    response.update(pagination_response)
    if modified_since:
        response["modified_since"] = modified_since
    if published_since:
        response["published_since"] = published_since
    return response