Example #1
    def process_list(self, api_version, data): # pylint: disable=unused-argument
        """
        This method returns details for a given set of errata.

        :param data: data obtained from the API; we're interested in data["errata_list"]

        :returns: dictionary containing detailed information for the given errata list

        """
        validate(data, JSON_SCHEMA)

        modified_since = data.get("modified_since", None)
        modified_since_dt = parse_datetime(modified_since)
        errata_to_process = data.get("errata_list", None)
        page = data.get("page", None)
        page_size = data.get("page_size", None)

        response = {"errata_list": {}}
        if modified_since:
            response["modified_since"] = modified_since

        if not errata_to_process:
            return response

        if len(errata_to_process) == 1:
            # treat single-label like a regex, get all matching names
            errata_to_process = self.find_errata_by_regex(errata_to_process[0])

        filters = []
        # if we have information about modified/published dates and receive "modified_since" in request,
        # compare the dates
        if modified_since:
            filters.append((self._filter_modified_since, [modified_since_dt]))

        errata_list = {}
        errata_page_to_process, pagination_response = paginate(errata_to_process, page, page_size, filters=filters)
        for errata in errata_page_to_process:
            errata_detail = self.cache.errata_detail.get(errata, None)
            if not errata_detail:
                continue

            errata_list[errata] = {
                "synopsis": none2empty(errata_detail[ERRATA_SYNOPSIS]),
                "summary": none2empty(errata_detail[ERRATA_SUMMARY]),
                "type": none2empty(errata_detail[ERRATA_TYPE]),
                "severity": none2empty(errata_detail[ERRATA_SEVERITY]),
                "description": none2empty(errata_detail[ERRATA_DESCRIPTION]),
                "solution": none2empty(errata_detail[ERRATA_SOLUTION]),
                "issued": none2empty(format_datetime(errata_detail[ERRATA_ISSUED])),
                "updated": none2empty(format_datetime(errata_detail[ERRATA_UPDATED])),
                "cve_list": errata_detail[ERRATA_CVE],
                "package_list": pkgidlist2packages(self.cache, errata_detail[ERRATA_PKGIDS]),
                "bugzilla_list": errata_detail[ERRATA_BUGZILLA],
                "reference_list": errata_detail[ERRATA_REFERENCE],
                "url": none2empty(errata_detail[ERRATA_URL])
                }
        response["errata_list"] = errata_list
        response.update(pagination_response)
        return response
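The paginate() helper used above isn't part of the snippet. A minimal sketch of what it might do, assuming each entry in filters is a (callable, extra_args) pair that returns a reduced item list, and guessing the default page size and the metadata keys merged into the response (the real vmaas helper may differ):

DEFAULT_PAGE_SIZE = 5000  # assumed default, not taken from the snippet

def paginate(items, page, page_size, filters=None):
    """Apply optional filters, then cut one page out of the item list."""
    for func, args in filters or []:
        items = func(items, *args)  # each filter is assumed to return a reduced list
    items = list(items)
    page = page if page and page > 0 else 1
    page_size = page_size if page_size and page_size > 0 else DEFAULT_PAGE_SIZE
    start = (page - 1) * page_size
    page_items = items[start:start + page_size]
    pagination_response = {
        "page": page,
        "page_size": len(page_items),
        "pages": (len(items) + page_size - 1) // page_size,
    }
    return page_items, pagination_response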
Example #2
    def process_list(self, api_version, data):  # pylint: disable=unused-argument
        """
        Returns package details.

        :param data: json request parsed into data structure

        :returns: json response with package details
        """
        validate(data, JSON_SCHEMA)

        packages = data.get('package_list', None)
        packagelist = {}
        if not packages:
            return packagelist

        for pkg in packages:
            packagedata = packagelist.setdefault(pkg, {})
            name, epoch, ver, rel, arch = utils.split_packagename(pkg)
            if name in self.cache.packagename2id \
               and (epoch, ver, rel) in self.cache.evr2id \
               and arch in self.cache.arch2id:
                name_id = self.cache.packagename2id[name]
                evr_id = self.cache.evr2id[(epoch, ver, rel)]
                arch_id = self.cache.arch2id[arch]
                if (name_id, evr_id, arch_id) in self.cache.nevra2pkgid:
                    pkg_id = self.cache.nevra2pkgid[(name_id, evr_id, arch_id)]
                    pkg_detail = self.cache.package_details[pkg_id]
                    packagedata['summary'] = pkg_detail[PKG_SUMMARY]
                    packagedata['description'] = pkg_detail[PKG_DESC]
                    packagedata['source_package'] = self._get_source_package(
                        pkg_detail)
                    packagedata['repositories'] = []
                    packagedata['package_list'] = self._get_built_binary_packages(pkg_id)
                    if pkg_id in self.cache.pkgid2repoids:
                        for repo_id in self.cache.pkgid2repoids[pkg_id]:
                            repodetail = self.cache.repo_detail[repo_id]
                            repodata = {
                                'label': repodetail[REPO_LABEL],
                                'name': repodetail[REPO_NAME],
                                'basearch': utils.none2empty(repodetail[REPO_BASEARCH]),
                                'releasever': utils.none2empty(repodetail[REPO_RELEASEVER])
                            }
                            packagedata['repositories'].append(repodata)
        response = {'package_list': packagelist}

        return response
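utils.split_packagename() is only called here, not defined. A rough sketch of how a NEVRA string such as "bash-0:4.4.19-7.el8.x86_64" could be split into the five fields the lookups need; the regex, the "0" epoch fallback and the empty-string error case are assumptions, the real helper may be stricter:

import re

# assumed filename-style NEVRA: name-[epoch:]version-release.arch
NEVRA_RE = re.compile(r'(.*)-(?:(\d+):)?([^-:]+)-([^-]+)\.([^.]+)$')

def split_packagename(nevra):
    """Split a NEVRA string into (name, epoch, version, release, arch)."""
    match = NEVRA_RE.match(nevra)
    if not match:
        return "", "", "", "", ""
    name, epoch, version, release, arch = match.groups()
    return name, epoch or "0", version, release, arch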
Example #3
    def process_list(self, api_version, data):  # pylint: disable=unused-argument
        """
        Returns repository details.

        :param data: json request parsed into data structure

        :returns: json response with repository details
        """
        validate(data, JSON_SCHEMA)

        repos = data.get('repository_list', None)
        page = data.get("page", None)
        page_size = data.get("page_size", None)
        repolist = {}
        if not repos:
            return repolist

        if len(repos) == 1:
            # treat single-label like a regex, get all matching names
            repos = self.find_repos_by_regex(repos[0])

        repo_page_to_process, pagination_response = paginate(
            repos, page, page_size)

        for label in repo_page_to_process:
            for repo_id in self.cache.repolabel2ids.get(label, []):
                repo_detail = self.cache.repo_detail[repo_id]
                repolist.setdefault(label, []).append({
                    "label": label,
                    "name": repo_detail[REPO_NAME],
                    "url": repo_detail[REPO_URL],
                    "basearch": none2empty(repo_detail[REPO_BASEARCH]),
                    "releasever": none2empty(repo_detail[REPO_RELEASEVER]),
                    "product": repo_detail[REPO_PRODUCT],
                    "revision": repo_detail[REPO_REVISION]
                })

        response = {
            'repository_list': repolist,
        }
        response.update(pagination_response)

        return response
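self.find_repos_by_regex() isn't shown either. A minimal sketch of the idea, written as a free function over the known labels (in the snippet those would come from self.cache.repolabel2ids); treating the single label as a full-match regex with anchors added is an assumption:

import re

def find_repos_by_regex(available_labels, regex):
    """Return all repository labels fully matching the given regex."""
    if not regex.startswith('^'):
        regex = '^' + regex
    if not regex.endswith('$'):
        regex = regex + '$'
    return [label for label in available_labels if re.match(regex, label)]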
Example #4
    def test_none2empty(self):
        """Test 'None' to "" conversion."""
        assert utils.none2empty(None) == ""
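The test only pins down the None case; an implementation consistent with it, and with how the other examples pass it both strings and formatted dates, could simply be:

def none2empty(value):
    """Return "" instead of None; any other value passes through unchanged."""
    return value if value is not None else ""

With that behaviour, the str(none2empty(...)) calls around the CVSS scores in Examples #6 and #7 yield "" for a missing score and the usual string form otherwise.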
Example #5
    def _process_updates(self, packages_to_process, api_version,
                         available_repo_ids, repo_ids_key, response,
                         module_ids):
        # pylint: disable=too-many-branches
        module_filter = module_ids is not None
        for pkg, pkg_dict in packages_to_process.items():
            name, epoch, ver, rel, arch = pkg_dict['parsed_nevra']
            name_id = self.db_cache.packagename2id[name]
            evr_id = self.db_cache.evr2id.get((epoch, ver, rel), None)
            arch_id = self.db_cache.arch2id.get(arch, None)
            current_evr_indexes = self.db_cache.updates_index[name_id].get(
                evr_id, [])

            # Package with given NEVRA not found in cache/DB
            if not current_evr_indexes:
                continue

            current_nevra_pkg_id = None
            for current_evr_index in current_evr_indexes:
                pkg_id = self.db_cache.updates[name_id][current_evr_index]
                current_nevra_arch_id = self.db_cache.package_details[pkg_id][2]
                if current_nevra_arch_id == arch_id:
                    current_nevra_pkg_id = pkg_id
                    break

            # Package with given NEVRA not found in cache/DB
            if not current_nevra_pkg_id:
                continue

            if api_version == 1:
                response['update_list'][pkg]['summary'] = \
                    self.db_cache.package_details[current_nevra_pkg_id][3]
                response['update_list'][pkg]['description'] = \
                    self.db_cache.package_details[current_nevra_pkg_id][4]
            response['update_list'][pkg]['available_updates'] = []

            # No updates found for given NEVRA
            last_version_pkg_id = self.db_cache.updates[name_id][-1]
            if last_version_pkg_id == current_nevra_pkg_id:
                continue

            # Get associated product IDs
            original_package_repo_ids = set()
            original_package_repo_ids.update(
                self.db_cache.pkgid2repoids.get(current_nevra_pkg_id, []))
            product_ids = self._get_related_products(original_package_repo_ids)
            valid_releasevers = self._get_valid_releasevers(
                original_package_repo_ids)

            # Get candidate package IDs
            update_pkg_ids = self.db_cache.updates[name_id][
                current_evr_indexes[-1] + 1:]

            for update_pkg_id in update_pkg_ids:
                # Filter out packages without errata
                if update_pkg_id not in self.db_cache.pkgid2errataids:
                    continue

                # Filter arch compatibility
                updated_nevra_arch_id = self.db_cache.package_details[update_pkg_id][2]
                if (updated_nevra_arch_id != arch_id and updated_nevra_arch_id
                        not in self.db_cache.arch_compat[arch_id]):
                    continue

                errata_ids = self.db_cache.pkgid2errataids.get(
                    update_pkg_id, set())
                nevra = self._build_nevra(update_pkg_id)
                for errata_id in errata_ids:
                    if module_filter and (update_pkg_id, errata_id) in self.db_cache.pkgerrata2module \
                            and not self.db_cache.pkgerrata2module[(update_pkg_id, errata_id)].intersection(module_ids):
                        continue
                    repo_ids = self._get_repositories(product_ids,
                                                      update_pkg_id,
                                                      [errata_id],
                                                      available_repo_ids,
                                                      valid_releasevers)
                    for repo_id in repo_ids:
                        repo_details = self.db_cache.repo_detail[repo_id]
                        response['update_list'][pkg]['available_updates'].append({
                            'package': nevra,
                            'erratum': self.db_cache.errataid2name[errata_id],
                            'repository': repo_details[REPO_LABEL],
                            'basearch': none2empty(repo_details[REPO_BASEARCH]),
                            'releasever': none2empty(repo_details[REPO_RELEASEVER])
                        })

            if self.use_hot_cache.upper() == "YES":
                HOT_CACHE_INSERTS.inc()
                self.hot_cache.insert(repo_ids_key + pkg,
                                      response['update_list'][pkg])
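Neither HOT_CACHE_INSERTS (a metrics counter) nor self.hot_cache is defined in the snippet. A minimal sketch of an insert/get cache with a size bound, keyed by repo_ids_key + pkg as above; the capacity, eviction policy and the get() method are assumptions:

from collections import OrderedDict

class HotCache:
    """Small bounded cache for already computed per-package update results."""

    def __init__(self, capacity=100000):
        self.capacity = capacity
        self.data = OrderedDict()

    def insert(self, key, value):
        self.data[key] = value
        self.data.move_to_end(key)
        if len(self.data) > self.capacity:
            self.data.popitem(last=False)  # drop the oldest entry

    def get(self, key):
        value = self.data.get(key)
        if value is not None:
            self.data.move_to_end(key)  # refresh recently used entries
        return value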
Example #6
    def process_list(self, data):
        """
        This method returns details for a given set of CVEs.

        :param data: data obtained from the API; we're interested in data["cve_list"]

        :returns: dictionary containing detailed information for the given CVE list

        """
        validate(data, JSON_SCHEMA)

        cves_to_process = data.get("cve_list", None)
        modified_since = data.get("modified_since", None)
        modified_since_dt = parse_datetime(modified_since)
        page = data.get("page", None)
        page_size = data.get("page_size", None)

        answer = {}
        if not cves_to_process:
            return answer

        cves_to_process = list(filter(None, cves_to_process))
        if len(cves_to_process) == 1:
            # treat single-label like a regex, get all matching names
            cves_to_process = self.find_cves_by_regex(cves_to_process[0])

        cve_list = {}
        cve_page_to_process, pagination_response = paginate(cves_to_process, page, page_size)
        for cve in cve_page_to_process:
            cve_detail = self.cache.cve_detail.get(cve, None)
            if not cve_detail:
                continue

            # if we have information about modified/published dates and receive "modified_since" in request,
            # compare the dates
            if modified_since:
                if cve_detail[CVE_MODIFIED_DATE] and cve_detail[CVE_MODIFIED_DATE] < modified_since_dt:
                    continue
                elif not cve_detail[CVE_MODIFIED_DATE] and cve_detail[CVE_PUBLISHED_DATE] and \
                                cve_detail[CVE_PUBLISHED_DATE] < modified_since_dt:
                    continue

            cve_list[cve] = {
                "redhat_url": none2empty(cve_detail[CVE_REDHAT_URL]),
                "secondary_url": none2empty(cve_detail[CVE_SECONDARY_URL]),
                "synopsis": cve,
                "impact": none2empty(cve_detail[CVE_IMPACT]),
                "public_date": none2empty(format_datetime(cve_detail[CVE_PUBLISHED_DATE])),
                "modified_date": none2empty(format_datetime(cve_detail[CVE_MODIFIED_DATE])),
                "cwe_list": none2empty(cve_detail[CVE_CWE]),
                "cvss3_score": str(none2empty(cve_detail[CVE_CVSS3_SCORE])),
                "description": none2empty(cve_detail[CVE_DESCRIPTION]),
                "package_list": pkgidlist2packages(self.cache, cve_detail[CVE_PID]),
                "errata_list": [self.cache.errataid2name[eid] for eid in cve_detail[CVE_EID]],

            }
        response = {"cve_list": cve_list}
        response.update(pagination_response)
        if modified_since:
            response["modified_since"] = modified_since
        return response
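Example #7 below replaces this inline date check with a _filter_modified_since filter handed to paginate(). Its implementation isn't shown; a sketch that reproduces the logic above, assuming the (callable, extra_args) filter convention from the paginate() sketch after Example #1:

    def _filter_modified_since(self, cves, modified_since_dt):
        """Drop CVEs whose modified (or, if missing, published) date is older than the threshold."""
        filtered = []
        for cve in cves:
            detail = self.cache.cve_detail.get(cve)
            if not detail:
                continue
            modified = detail[CVE_MODIFIED_DATE]
            published = detail[CVE_PUBLISHED_DATE]
            if modified and modified < modified_since_dt:
                continue
            if not modified and published and published < modified_since_dt:
                continue
            filtered.append(cve)
        return filtered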
Example #7
    def process_list(self, api_version, data): # pylint: disable=unused-argument
        """
        This method returns details for a given set of CVEs.

        :param data: data obtained from the API; we're interested in data["cve_list"]

        :returns: dictionary containing detailed information for the given CVE list

        """
        validate(data, JSON_SCHEMA)

        cves_to_process = data.get("cve_list", None)
        modified_since = data.get("modified_since", None)
        published_since = data.get("published_since", None)
        rh_only = data.get('rh_only', False)
        modified_since_dt = parse_datetime(modified_since)
        published_since_dt = parse_datetime(published_since)
        page = data.get("page", None)
        page_size = data.get("page_size", None)

        answer = {}
        if not cves_to_process:
            return answer

        cves_to_process = list(filter(None, cves_to_process))
        if len(cves_to_process) == 1:
            # treat single-label like a regex, get all matching names
            cves_to_process = self.find_cves_by_regex(cves_to_process[0])

        filters = [(filter_item_if_exists, [self.cache.cve_detail])]
        if rh_only:
            filters.append((self._filter_redhat_only, []))
        # if we have information about modified/published dates and receive "modified_since"
        # or "published_since" in request,
        # compare the dates
        if modified_since:
            filters.append((self._filter_modified_since, [modified_since_dt]))

        if published_since:
            filters.append((self._filter_published_since, [published_since_dt]))

        cve_list = {}
        cve_page_to_process, pagination_response = paginate(cves_to_process, page, page_size, filters=filters)
        for cve in cve_page_to_process:
            cve_detail = self.cache.cve_detail.get(cve, None)
            if not cve_detail:
                continue

            bin_pkg_list, src_pkg_list = pkgidlist2packages(self.cache, cve_detail[CVE_PID])
            cve_list[cve] = {
                "redhat_url": none2empty(cve_detail[CVE_REDHAT_URL]),
                "secondary_url": none2empty(cve_detail[CVE_SECONDARY_URL]),
                "synopsis": cve,
                "impact": none2empty(cve_detail[CVE_IMPACT]),
                "public_date": none2empty(format_datetime(cve_detail[CVE_PUBLISHED_DATE])),
                "modified_date": none2empty(format_datetime(cve_detail[CVE_MODIFIED_DATE])),
                "cwe_list": none2empty(cve_detail[CVE_CWE]),
                "cvss3_score": str(none2empty(cve_detail[CVE_CVSS3_SCORE])),
                "cvss3_metrics": str(none2empty(cve_detail[CVE_CVSS3_METRICS])),
                "cvss2_score": str(none2empty(cve_detail[CVE_CVSS2_SCORE])),
                "cvss2_metrics": str(none2empty(cve_detail[CVE_CVSS2_METRICS])),
                "description": none2empty(cve_detail[CVE_DESCRIPTION]),
                "package_list": bin_pkg_list,
                "source_package_list": src_pkg_list,
                "errata_list": [self.cache.errataid2name[eid] for eid in cve_detail[CVE_EID]],

            }
        response = {"cve_list": cve_list}
        response.update(pagination_response)
        if modified_since:
            response["modified_since"] = modified_since
        if published_since:
            response["published_since"] = published_since
        return response
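A hypothetical round trip through this handler; the CVE name, date and all response values are purely illustrative, and the pagination keys follow the paginate() sketch after Example #1:

request = {
    "cve_list": ["CVE-2017-57.*"],  # a single entry is treated as a regex
    "modified_since": "2018-01-01T00:00:00+00:00",
    "rh_only": True,
    "page": 1,
    "page_size": 25,
}
# response = handler.process_list(1, request) would have roughly this shape:
# {
#     "cve_list": {
#         "CVE-2017-5715": {
#             "synopsis": "CVE-2017-5715", "impact": "...", "cvss3_score": "...",
#             "package_list": [...], "source_package_list": [...], "errata_list": [...],
#             ...
#         }
#     },
#     "modified_since": "2018-01-01T00:00:00+00:00",
#     "page": 1, "page_size": 25, "pages": 1
# }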