def _get_erratas(self, api_version: int, pkg_id: int, modified_since: datetime.datetime, third_party: bool) -> tuple:
    """Collect errata records attached to a package, sorted by errata name.

    :param api_version: API version (the 'updated' field is only emitted for v3+)
    :param pkg_id: package ID to look up erratas for
    :param modified_since: threshold used to flag recently updated erratas (v3+ only)
    :param third_party: when False, third-party erratas are skipped
    :returns: tuple (sorted errata dicts, modified_found flag)
    """
    modified_found = False
    collected = []
    for errata_id in self.cache.pkgid2errataids.get(pkg_id, []):
        errata_name = self.cache.errataid2name[errata_id]
        detail = self.cache.errata_detail[errata_name]
        # Third-party erratas are only included on explicit request.
        if detail[ERRATA_THIRD_PARTY] and not third_party:
            continue
        record = {
            'name': errata_name,
            'issued': none2empty(format_datetime(detail[ERRATA_ISSUED])),
        }
        if api_version >= 3:
            updated_ts = detail[ERRATA_UPDATED]
            record['updated'] = none2empty(format_datetime(updated_ts))
            modified_found = self._update_modified_found(modified_found, modified_since, updated_ts)
        if detail[ERRATA_CVE]:
            record['cve_list'] = natsorted(detail[ERRATA_CVE])
        collected.append(record)
    return natsorted(collected, key=lambda rec: rec['name']), modified_found
def process_list(self, api_version: int, data: dict):  # pylint: disable=unused-argument
    """
    Returns list of NEVRAs for given packge name.

    :param data: json request parsed into data structure
    :param api_version: API version (1, 2, 3).

    :returns: json response with list of NEVRAs
    """
    page = data.get("page", None)
    page_size = data.get("page_size", None)
    options = {
        'modified_since': parse_datetime(data.get("modified_since", None)),
        'return_repositories': data.get("return_repositories", True),
        'return_errata': data.get("return_errata", True),
        'return_summary': data.get("return_summary", False),
        'return_description': data.get("return_description", False),
        'third_party': data.get("third_party", False),
    }

    package_names = data.get('package_name_list', None)
    if not package_names:
        return {}

    # Input names may be regexes; expand them to concrete names first.
    package_names = self.try_expand_by_regex(api_version, package_names)
    package_names, response = self._use_pagination(api_version, package_names, page, page_size)
    response['package_name_list'] = self._build_package_name_list(api_version, package_names, options)
    # Date and time of last data change in the VMaaS DB
    response['last_change'] = format_datetime(self.cache.dbchange['last_change'])
    return response
def _get_first_published_from_erratas(erratas):  # pylint: disable=R0201
    """Return 'first_published': the 'issued' date of the oldest erratum."""
    issued_dates = [parse_dt(erratum['issued']) for erratum in erratas]
    # Empty input yields None, matching the no-errata case.
    return format_datetime(min(issued_dates, default=None))
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """
    This method returns RPM names for given SRPM list filtered by content set

    :param data: data from api - SRPM name list and content set

    :returns: list of RPM names for given content set and SRPM
    """
    srpm_list = data.get('srpm_name_list', [])
    content_set_list = data.get('content_set_list', [])
    response = {}
    if not srpm_list:
        return response
    response['last_change'] = format_datetime(
        self.cache.dbchange['last_change'])
    rpm_data = {}
    for srpm in srpm_list:
        if srpm in self.cache.packagename2id:
            src_pkg_name_id = self.cache.packagename2id[srpm]
            content_set_ids = self._get_content_set_ids(src_pkg_name_id)
            # Package IDs of NEVRAs whose name matches the SRPM and which are
            # known to build binary packages (present in src_pkg_id2pkg_ids).
            # NOTE(review): every value in this dict is src_pkg_name_id, so the
            # later `src_pkg_name_id in src_pkg_ids2names.values()` check looks
            # equivalent to "dict is non-empty" — confirm intent.
            src_pkg_ids2names = {
                pkg_id: src_pkg_name_id
                for (name_id, _, _), pkg_id in self.cache.nevra2pkgid.items()
                if src_pkg_name_id == name_id and pkg_id in self.cache.src_pkg_id2pkg_ids
            }
            # Map each source package ID to the set of binary package IDs built from it.
            src2pkgid = {}
            for src_pkg_id in src_pkg_ids2names:
                src2pkgid.setdefault(src_pkg_id, set()).update(
                    self.cache.src_pkg_id2pkg_ids[src_pkg_id])
            content_set_labels = self._get_content_set_labels(
                content_set_ids, content_set_list)
            label2name_ids = self._process_content_set(content_set_labels)
            # Union of all binary package IDs produced by any matching source package.
            pkg_ids = set()
            for pkg in src2pkgid.values():
                pkg_ids.update(pkg)
            # For each content set label, keep only binary package names whose
            # name ID belongs to that content set.
            label2pkg_name_filtered = {}
            for label in content_set_labels:
                pkg_names = set(self.cache.id2packagename[
                    self.cache.package_details[pid][PKG_NAME_ID]]
                    for pid in pkg_ids
                    if self.cache.package_details[pid]
                    [PKG_NAME_ID] in label2name_ids[label])
                label2pkg_name_filtered.setdefault(label, []).extend(
                    natsorted(pkg_names))
            if src_pkg_name_id in src_pkg_ids2names.values():
                rpm_data.setdefault(srpm, {}).update(label2pkg_name_filtered)
    response['srpm_name_list'] = rpm_data
    return response
def process(self):
    """
    This method returns details of last-processed-time from the VMaaS DB
    :returns: dictionary of errata_changes/cve_changes/repository_changes/last_change timestamps
    """
    # Every dbchange entry is a timestamp; format each one for the response.
    return {key: format_datetime(value) for key, value in self.cache.dbchange.items()}
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """Return list of potential security issues"""
    data['security_only'] = False
    updates = self.updates_api.process_list(3, data)
    # Gather the distinct erratum names across all available updates.
    erratum_names = {
        update['erratum']
        for package_updates in updates['update_list'].values()
        for update in package_updates.get('available_updates', [])
    }
    return {
        'errata_list': list(erratum_names),
        'last_change': format_datetime(self.db_cache.dbchange['last_change'])
    }
def process_list(self, api_version: int, data: dict) -> dict:
    """
    Look up updates for each input package: the newer package name, the
    erratum fixing it, and the repository that erratum comes from.

    :param api_version: API version of the function
    :param data: input json, must contain package_list to find updates for them
    :returns: json with updates_list as a list of dictionaries
              {'package': <p_name>, 'erratum': <e_name>, 'repository': <r_label>}
    """
    packages_to_process, update_list = self.process_input_packages(data)
    response = {
        'update_list': update_list,
        'last_change': format_datetime(self.db_cache.dbchange['last_change'])
    }
    # Empty input package list -> empty update list, nothing more to compute.
    if len(packages_to_process) == 0:
        return response

    # Narrow the candidate repositories step by step: explicit list,
    # then releasever, then basearch.
    repository_list, repo_ids = self._get_repository_list(data)
    response = insert_if_not_empty(response, 'repository_list', repository_list)
    releasever, repo_ids = self._get_releasever(data, repo_ids)
    response = insert_if_not_empty(response, 'releasever', releasever)
    basearch, available_repo_ids = self._get_basearch(data, repo_ids)
    response = insert_if_not_empty(response, 'basearch', basearch)
    modules_list, module_ids = self._get_modules_list(data)
    response = insert_if_not_empty(response, 'modules_list', modules_list)

    # Backward compatibility of older APIs.
    security_only = get_security_only(api_version, data)
    third_party = data.get('third_party', False)
    optimistic_updates = data.get('optimistic_updates', False)
    # Process updated packages, errata and fill the response.
    response['update_list'] = self._process_updates(
        api_version, update_list, packages_to_process, available_repo_ids,
        module_ids, security_only, optimistic_updates, third_party)
    return response
def _get_repositories(self, pkg_id) -> tuple:
    """Build repository dicts for a package, sorted by repository label.

    :param pkg_id: package ID to look up repositories for
    :returns: tuple (sorted repo dicts, third_party_only flag) where the flag
              is True when at least one repo was found and all are third-party
    """
    # FIXME Add support for modules and streams.
    repos = []
    third_party_flags = []
    for repo_id in self.cache.pkgid2repoids.get(pkg_id, []):
        detail = self.cache.repo_detail[repo_id]
        third_party_flags.append(detail[REPO_THIRD_PARTY])
        repos.append({
            'label': detail[REPO_LABEL],
            'name': detail[REPO_NAME],
            'basearch': none2empty(detail[REPO_BASEARCH]),
            'releasever': none2empty(detail[REPO_RELEASEVER]),
            'revision': format_datetime(detail[REPO_REVISION])
        })
    # Third-party only: some repos were found and none of them is first-party.
    third_party_only = bool(third_party_flags) and False not in third_party_flags
    return natsorted(repos, key=lambda repo: repo['label']), third_party_only
def _build_package_list(self, package_ids: list, opts) -> list:
    """Build response dicts (nevra/description/summary) for given package IDs.

    Fix: the original made a second full pass over ``package_ids`` (re-fetching
    each ``package_details`` entry) just to attach the 'modified' timestamp;
    that work is now done in the single loop.

    :param package_ids: package IDs to serialize; input order is preserved
    :param opts: options dict; only 'return_modified' is read here
    :returns: list of package dicts, one per input ID
    """
    return_modified = opts['return_modified']
    package_list = []
    for package_id in package_ids:
        pkg_detail = self.cache.package_details[package_id]
        pkg = {
            'nevra': utils.pkg_detail2nevra(self.cache, pkg_detail),
            'description': self.cache.strings.get(pkg_detail[PKG_DESC_ID], None),
            'summary': self.cache.strings.get(pkg_detail[PKG_SUMMARY_ID], None),
        }
        if return_modified:
            # For debugging enable to return "modified" value for each package
            pkg['modified'] = format_datetime(
                datetime.datetime.fromtimestamp(pkg_detail[PKG_MODIFIED_ID], tz=UTC))
        package_list.append(pkg)
    return package_list
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """
    This method returns content sets for given RPM list filtered by content set

    :param data: data from api - RPM name list and content set

    :returns: list of content sets for given content set and RPM list
    """
    rpm_list = data.get('rpm_name_list', [])
    content_set_list = data.get('content_set_list', [])
    if not rpm_list:
        return {}
    response = {'last_change': format_datetime(self.cache.dbchange['last_change'])}
    content_data = {}
    # Deduplicate input names; unknown RPMs still get an (empty) entry.
    for rpm_name in set(rpm_list):
        labels = []
        if rpm_name in self.cache.packagename2id:
            name_id = self.cache.packagename2id[rpm_name]
            cs_ids = self._get_content_set_ids(name_id)
            labels.extend(self._get_content_set_labels(cs_ids, content_set_list))
        content_data.setdefault(rpm_name, []).extend(natsorted(labels))
    response['rpm_name_list'] = content_data
    return response
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """
    Returns info about all packages.

    :param data: json request parsed into data structure

    :returns: json response with package details
    """
    opts = {'return_modified': data.get("return_modified", False)}
    # 'modified_since' narrows the result to recently changed packages.
    modified_since = parse_datetime(data.get("modified_since", None))
    package_ids = self._get_package_ids(self.modify_since_dt2int(modified_since))
    page_ids, response = paginate(package_ids,
                                  data.get("page", None),
                                  data.get("page_size", None),
                                  sort_input=False)
    response['package_list'] = self._build_package_list(page_ids, opts)
    response['last_change'] = utils.format_datetime(self.cache.dbchange['last_change'])
    # Total count before pagination.
    response['total'] = len(package_ids)
    return response
def process_list(self, api_version, data):
    """Return list of potential security issues

    Combines two sources:
    - repository-based updates (CVEs fixed by erratas available in repos)
    - OVAL definitions (manually fixable and unpatched CVEs)
    and returns them in three disjoint buckets plus the DB last-change time.
    """
    strip_prefixes(data, REPO_PREFIXES)
    data[
        "optimistic_updates"] = True  # find updates even if original package is not found in repo
    extended = data.get("extended", False)
    cve_dict = {}
    manually_fixable_cve_dict = {}
    unpatched_cve_dict = {}
    # Repositories: collect CVEs from erratas attached to available updates.
    updates = self.updates_api.process_list(2, data)
    for package in updates['update_list']:
        for update in updates['update_list'][package].get(
                'available_updates', []):
            for cve in self.db_cache.errata_detail[
                    update['erratum']][ERRATA_CVE]:
                cve_dict.setdefault(cve, {})["cve"] = cve
                cve_dict[cve].setdefault("affected_packages", set()).add(package)
                cve_dict[cve].setdefault("errata", set()).add(update['erratum'])
    # OVAL
    # TODO: re-factor, double parsing input packages
    packages_to_process, _ = self.updates_api.process_input_packages(data)
    modules_list = {
        f"{x['module_name']}:{x['module_stream']}"
        for x in data.get('modules_list', [])
    }
    # Get CPEs for affected repos/content sets
    # TODO: currently OVAL doesn't evaluate when there is not correct input repo list mapped to CPEs
    #       there needs to be better fallback at least to guess correctly RHEL version,
    #       use old VMaaS repo guessing?
    candidate_definitions = self._repos_to_definitions(
        data.get('repository_list', []), data.get('basearch'),
        data.get('releasever'))
    for package, parsed_package in packages_to_process.items():
        name, epoch, ver, rel, arch = parsed_package["parsed_nevra"]
        package_name_id = self.db_cache.packagename2id[name]
        # Only definitions that mention this package name AND match the input repos.
        definition_ids = candidate_definitions.intersection(
            self.db_cache.packagename_id2definition_ids.get(
                package_name_id, []))
        #LOGGER.info("OVAL definitions found for package_name=%s, count=%s", name, len(definition_ids))
        for definition_id in definition_ids:
            definition_type, criteria_id = self.db_cache.ovaldefinition_detail[
                definition_id]
            # Skip if unfixed CVE feature flag is disabled
            if definition_type == OVAL_DEFINITION_TYPE_VULNERABILITY and not CFG.oval_unfixed_eval_enabled:
                continue
            cves = self.db_cache.ovaldefinition_id2cves.get(
                definition_id, [])
            # Skip if all CVEs from definition were already found somewhere
            if not [
                    cve for cve in cves
                    if cve not in cve_dict and cve not in manually_fixable_cve_dict
                    and cve not in unpatched_cve_dict
            ]:
                continue
            if self._evaluate_criteria(
                    criteria_id, (package_name_id, epoch, ver, rel, arch),
                    modules_list):
                # Vulnerable
                #LOGGER.info("Definition id=%s, type=%s matched! Adding CVEs.", definition_id, definition_type)
                if definition_type == OVAL_DEFINITION_TYPE_PATCH:
                    for cve in cves:
                        # Skip CVEs found in repos
                        if cve in cve_dict:
                            continue
                        manually_fixable_cve_dict.setdefault(
                            cve, {})["cve"] = cve
                        manually_fixable_cve_dict[cve].setdefault(
                            "affected_packages", set()).add(package)
                        # no erratum directly mappable to CVE in OVAL
                        manually_fixable_cve_dict[cve].setdefault(
                            "errata", set())
                elif definition_type == OVAL_DEFINITION_TYPE_VULNERABILITY:
                    for cve in cves:
                        # Skip fixable CVEs (should never happen, just in case)
                        if cve in cve_dict or cve in manually_fixable_cve_dict:
                            continue
                        unpatched_cve_dict.setdefault(cve, {})["cve"] = cve
                        unpatched_cve_dict[cve].setdefault(
                            "affected_packages", set()).add(package)
                        # no erratum for unpatched CVEs
                        unpatched_cve_dict[cve].setdefault("errata", set())
                else:
                    raise ValueError("Unsupported definition type: %s" %
                                     definition_type)
    return {
        'cve_list': self._serialize_dict(cve_dict, extended=extended),
        'manually_fixable_cve_list':
            self._serialize_dict(manually_fixable_cve_dict, extended=extended),
        'unpatched_cve_list':
            self._serialize_dict(unpatched_cve_dict, extended=extended),
        'last_change': format_datetime(self.db_cache.dbchange['last_change'])
    }
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """
    This method returns details for given set of CVEs.

    :param data: data obtained from api, we're interested in data["cve_list"]

    :returns: list of dictionaries containing detailed information for given cve list}
    """
    cves_to_process = data.get("cve_list", None)
    modified_since = data.get("modified_since", None)
    published_since = data.get("published_since", None)
    rh_only = data.get('rh_only', False)
    errata_only = data.get('errata_associated', False)
    modified_since_dt = parse_datetime(modified_since)
    published_since_dt = parse_datetime(published_since)
    page = data.get("page", None)
    page_size = data.get("page_size", None)
    answer = {}
    if not cves_to_process:
        return answer
    # Drop empty/None entries, then expand regex patterns to concrete CVE names.
    cves_to_process = list(filter(None, cves_to_process))
    cves_to_process = self.try_expand_by_regex(cves_to_process)
    # Filters are applied by paginate() before slicing the page.
    filters = [(filter_item_if_exists, [self.cache.cve_detail])]
    if rh_only:
        filters.append((self._filter_redhat_only, []))
    if errata_only:
        filters.append((self._filter_errata_only, []))
    # if we have information about modified/published dates and receive "modified_since"
    # or "published_since" in request,
    # compare the dates
    if modified_since:
        filters.append((self._filter_modified_since, [modified_since_dt]))
    if published_since:
        filters.append((self._filter_published_since, [published_since_dt]))
    cve_list = {}
    cve_page_to_process, pagination_response = paginate(cves_to_process, page, page_size, filters=filters)
    for cve in cve_page_to_process:
        cve_detail = self.cache.cve_detail.get(cve, None)
        if not cve_detail:
            continue
        bin_pkg_list, src_pkg_list = pkgidlist2packages(self.cache, cve_detail[CVE_PID])
        cve_list[cve] = {
            "redhat_url": none2empty(cve_detail[CVE_REDHAT_URL]),
            "secondary_url": none2empty(cve_detail[CVE_SECONDARY_URL]),
            "synopsis": cve,
            "impact": none2empty(cve_detail[CVE_IMPACT]),
            "public_date": none2empty(format_datetime(cve_detail[CVE_PUBLISHED_DATE])),
            "modified_date": none2empty(format_datetime(cve_detail[CVE_MODIFIED_DATE])),
            "cwe_list": none2empty(cve_detail[CVE_CWE]),
            # CVSS scores/metrics are stringified so None becomes '' via none2empty.
            "cvss3_score": str(none2empty(cve_detail[CVE_CVSS3_SCORE])),
            "cvss3_metrics": str(none2empty(cve_detail[CVE_CVSS3_METRICS])),
            "cvss2_score": str(none2empty(cve_detail[CVE_CVSS2_SCORE])),
            "cvss2_metrics": str(none2empty(cve_detail[CVE_CVSS2_METRICS])),
            "description": none2empty(cve_detail[CVE_DESCRIPTION]),
            "package_list": bin_pkg_list,
            "source_package_list": src_pkg_list,
            "errata_list": [self.cache.errataid2name[eid] for eid in cve_detail[CVE_EID]],
        }
    response = {"cve_list": cve_list}
    response.update(pagination_response)
    response['last_change'] = format_datetime(self.cache.dbchange['last_change'])
    return response
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """
    Returns package details.

    :param data: json request parsed into data structure

    :returns: json response with package details
    """
    packages = data.get('package_list', None)
    # By default, don't include third party data
    want_third_party = data.get('third_party', False)
    packagelist = {}
    response = {
        'last_change': utils.format_datetime(self.cache.dbchange['last_change'])
    }
    if not packages:
        response['package_list'] = packagelist
        return response
    for pkg in packages:
        # Every requested NEVRA gets an entry; unknown ones stay empty dicts.
        packagedata = packagelist.setdefault(pkg, {})
        is_third_party = False
        name, epoch, ver, rel, arch = parse_rpm_name(pkg, default_epoch='0')
        if name in self.cache.packagename2id \
                and (epoch, ver, rel) in self.cache.evr2id \
                and arch in self.cache.arch2id:
            name_id = self.cache.packagename2id[name]
            evr_id = self.cache.evr2id[(epoch, ver, rel)]
            arch_id = self.cache.arch2id[arch]
            pkg_id = self.cache.nevra2pkgid.get((name_id, evr_id, arch_id), None)
            if pkg_id:
                pkg_detail = self.cache.package_details[pkg_id]
                packagedata['summary'] = self.cache.strings.get(
                    pkg_detail[PKG_SUMMARY_ID], None)
                packagedata['description'] = self.cache.strings.get(
                    pkg_detail[PKG_DESC_ID], None)
                packagedata['source_package'] = self._get_source_package(
                    pkg_detail)
                packagedata['repositories'] = []
                packagedata[
                    'package_list'] = self._get_built_binary_packages(
                        pkg_id)
                if pkg_id in self.cache.pkgid2repoids:
                    for repo_id in self.cache.pkgid2repoids[pkg_id]:
                        repodetail = self.cache.repo_detail[repo_id]
                        # Track whether any containing repo is third-party.
                        is_third_party = is_third_party or bool(
                            repodetail[REPO_THIRD_PARTY])
                        repodata = {
                            'label': repodetail[REPO_LABEL],
                            'name': repodetail[REPO_NAME],
                            'basearch': utils.none2empty(repodetail[REPO_BASEARCH]),
                            'releasever': utils.none2empty(repodetail[REPO_RELEASEVER]),
                        }
                        packagedata['repositories'].append(repodata)
        # If the package is third party, then remove it from result
        if not want_third_party and is_third_party:
            packagelist[pkg] = {}
    response['package_list'] = packagelist
    return response
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """
    This method returns details for given set of Errata.

    :param data: data obtained from api, we're interested in data["errata_list"]

    :returns: dictionary containing detailed information for given errata list}
    """
    modified_since = data.get("modified_since", None)
    modified_since_dt = parse_datetime(modified_since)
    third_party = data.get("third_party", False)
    errata_to_process = data.get("errata_list", None)
    page = data.get("page", None)
    page_size = data.get("page_size", None)
    errata_type = data.get("type", None)
    severity = data.get("severity", [])
    response = {"errata_list": {},
                "last_change": format_datetime(self.cache.dbchange["last_change"])}
    filters = [(filter_item_if_exists, [self.cache.errata_detail]),
               (self._filter_third_party, [third_party])]
    if modified_since:
        # if we have information about modified/published dates and receive "modified_since" in request,
        # compare the dates
        filters.append((self._filter_modified_since, [modified_since_dt]))
    if errata_type:
        # Normalize to a lower-cased list whether input was a list or a scalar.
        errata_type = [t.lower() for t in set(errata_type)] \
            if isinstance(errata_type, list) else [errata_type.lower()]
        response["type"] = errata_type
        filters.append((self._filter_errata_by_prop, ["type", errata_type]))
    # Severity filter applies when severity is None (match null severity) or a
    # non-empty list; the default empty list means "no severity filtering".
    if severity is None or len(severity) != 0:
        severity = self._prepare_severity(severity)
        response["severity"] = severity
        filters.append((self._filter_errata_by_prop, ["severity", severity]))
    if not errata_to_process:
        return response
    errata_to_process = self.try_expand_by_regex(errata_to_process)
    errata_list = {}
    errata_page_to_process, pagination_response = paginate(errata_to_process, page, page_size,
                                                           filters=filters)
    for errata in errata_page_to_process:
        errata_detail = self.cache.errata_detail.get(errata, None)
        if not errata_detail:
            continue
        bin_pkg_list, src_pkg_list = pkgidlist2packages(self.cache, errata_detail[ERRATA_PKGIDS])
        releasevers = self._errata_releasevers(errata_detail[ERRATA_ID])
        if errata_detail[ERRATA_MODULE]:
            # Module package lists may still be raw package IDs; translate them
            # to NEVRA strings once (all-digit entries indicate IDs).
            # NOTE(review): this mutates the cached errata_detail in place, so
            # later calls see the translated lists — presumably intentional
            # (the all-digit guard makes it idempotent); verify.
            for index, module_update in enumerate(errata_detail[ERRATA_MODULE]):
                if all(str(elem).isdigit() for elem in errata_detail[ERRATA_MODULE][index]["package_list"]):
                    module_pkg_list, module_src_pkg_list = pkgidlist2packages(
                        self.cache, module_update["package_list"])
                    errata_detail[ERRATA_MODULE][index]["package_list"] = module_pkg_list
                    errata_detail[ERRATA_MODULE][index]["source_package_list"] = module_src_pkg_list
        errata_list[errata] = {
            "synopsis": none2empty(errata_detail[ERRATA_SYNOPSIS]),
            "summary": none2empty(errata_detail[ERRATA_SUMMARY]),
            "type": none2empty(errata_detail[ERRATA_TYPE]),
            "severity": errata_detail[ERRATA_SEVERITY],
            "description": none2empty(errata_detail[ERRATA_DESCRIPTION]),
            "solution": none2empty(errata_detail[ERRATA_SOLUTION]),
            "issued": none2empty(format_datetime(errata_detail[ERRATA_ISSUED])),
            "updated": none2empty(format_datetime(errata_detail[ERRATA_UPDATED])),
            "cve_list": errata_detail[ERRATA_CVE],
            "package_list": bin_pkg_list,
            "source_package_list": src_pkg_list,
            "bugzilla_list": errata_detail[ERRATA_BUGZILLA],
            "reference_list": errata_detail[ERRATA_REFERENCE],
            "modules_list": errata_detail[ERRATA_MODULE],
            "url": none2empty(errata_detail[ERRATA_URL]),
            "third_party": errata_detail[ERRATA_THIRD_PARTY],
            "requires_reboot": errata_detail[ERRATA_REQUIRES_REBOOT],
            "release_versions": releasevers,
        }
    response["errata_list"] = errata_list
    response.update(pagination_response)
    return response