Example #1
    def process_list(self, api_version, data):  # pylint: disable=unused-argument
        """
        Returns info about all packages.

        :param data: json request parsed into data structure
        :returns: json response with package details
        """

        page = data.get("page", None)
        page_size = data.get("page_size", None)
        opts = dict(return_modified=data.get("return_modified", False))
        modified_since = parse_datetime(data.get("modified_since", None))
        modified_since_int = self.modify_since_dt2int(modified_since)
        package_ids = self._get_package_ids(modified_since_int)
        page_package_ids, response = paginate(package_ids,
                                              page,
                                              page_size,
                                              sort_input=False)
        package_list = self._build_package_list(page_package_ids, opts)
        response['package_list'] = package_list
        response['last_change'] = utils.format_datetime(
            self.cache.dbchange['last_change'])
        response['total'] = len(package_ids)
        return response
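
For reference, a request body exercising the options read above might look like the following; the key names mirror the data.get() calls in process_list, and the values are purely illustrative.

# Illustrative request payload for the handler above (values are made up).
example_request = {
    "page": 1,
    "page_size": 20,
    "return_modified": True,
    "modified_since": "2023-01-01T00:00:00+00:00",
}
# The response then carries "package_list", "last_change" and "total",
# plus the pagination fields produced by paginate().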
Example #2
    def process_list(self, api_version, data): # pylint: disable=unused-argument
        """
        Returns details for a given set of CVEs.

        :param data: data obtained from the API; we're interested in data["cve_list"]

        :returns: list of dictionaries with detailed information for the given CVE list

        """
        cves_to_process = data.get("cve_list", None)
        modified_since = data.get("modified_since", None)
        published_since = data.get("published_since", None)
        rh_only = data.get('rh_only', False)
        errata_only = data.get('errata_associated', False)
        modified_since_dt = parse_datetime(modified_since)
        published_since_dt = parse_datetime(published_since)
        page = data.get("page", None)
        page_size = data.get("page_size", None)

        answer = {}
        if not cves_to_process:
            return answer

        cves_to_process = list(filter(None, cves_to_process))
        cves_to_process = self.try_expand_by_regex(cves_to_process)

        filters = [(filter_item_if_exists, [self.cache.cve_detail])]
        if rh_only:
            filters.append((self._filter_redhat_only, []))
        if errata_only:
            filters.append((self._filter_errata_only, []))
        # if we have information about modified/published dates and the request
        # includes "modified_since" or "published_since", compare the dates
        if modified_since:
            filters.append((self._filter_modified_since, [modified_since_dt]))

        if published_since:
            filters.append((self._filter_published_since, [published_since_dt]))

        cve_list = {}
        cve_page_to_process, pagination_response = paginate(cves_to_process, page, page_size, filters=filters)
        for cve in cve_page_to_process:
            cve_detail = self.cache.cve_detail.get(cve, None)
            if not cve_detail:
                continue

            bin_pkg_list, src_pkg_list = pkgidlist2packages(self.cache, cve_detail[CVE_PID])
            cve_list[cve] = {
                "redhat_url": none2empty(cve_detail[CVE_REDHAT_URL]),
                "secondary_url": none2empty(cve_detail[CVE_SECONDARY_URL]),
                "synopsis": cve,
                "impact": none2empty(cve_detail[CVE_IMPACT]),
                "public_date": none2empty(format_datetime(cve_detail[CVE_PUBLISHED_DATE])),
                "modified_date": none2empty(format_datetime(cve_detail[CVE_MODIFIED_DATE])),
                "cwe_list": none2empty(cve_detail[CVE_CWE]),
                "cvss3_score": str(none2empty(cve_detail[CVE_CVSS3_SCORE])),
                "cvss3_metrics": str(none2empty(cve_detail[CVE_CVSS3_METRICS])),
                "cvss2_score": str(none2empty(cve_detail[CVE_CVSS2_SCORE])),
                "cvss2_metrics": str(none2empty(cve_detail[CVE_CVSS2_METRICS])),
                "description": none2empty(cve_detail[CVE_DESCRIPTION]),
                "package_list": bin_pkg_list,
                "source_package_list": src_pkg_list,
                "errata_list": [self.cache.errataid2name[eid] for eid in cve_detail[CVE_EID]],

            }
        response = {"cve_list": cve_list}
        response.update(pagination_response)
        response['last_change'] = format_datetime(self.cache.dbchange['last_change'])
        return response
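
The none2empty helper is used throughout these handlers to normalize missing cache values before they are serialized. A one-line sketch consistent with how it is called above, not necessarily the library's exact definition:

def none2empty(value):
    # Replace None with an empty string so the response never carries nulls.
    return value if value is not None else ""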
Example #3
def _use_pagination(api_version: int, names: list, page: int,
                    page_size: int):
    """Paginate names for API v3 and newer; older versions return the full list."""
    if api_version >= 3:
        names_page, pagination_response = paginate(names, page, page_size)
        return names_page, pagination_response
    return names, {}
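
A small usage sketch of the version gate above, with a simplified stand-in for paginate() (the real helper returns richer pagination metadata):

def paginate(names, page, page_size):
    # Simplified stand-in, only for this illustration.
    page = page if page and page >= 1 else 1
    chunk = names[(page - 1) * page_size:(page - 1) * page_size + page_size]
    return chunk, {"page": page, "page_size": len(chunk)}

names = ["bash", "kernel", "vim"]
assert _use_pagination(2, names, 1, 2) == (names, {})            # pre-v3: no paging
assert _use_pagination(3, names, 1, 2)[0] == ["bash", "kernel"]  # v3+: first page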
Example #4
    def process_list(self, api_version, data):  # pylint: disable=unused-argument
        """
        Returns repository details.

        :param data: json request parsed into data structure

        :returns: json response with repository details
        """
        repos = data.get('repository_list', None)
        strip_prefixes(repos, REPO_PREFIXES)
        modified_since = data.get('modified_since', None)
        modified_since_dt = parse_datetime(modified_since)
        page = data.get("page", None)
        page_size = data.get("page_size", None)

        # By default, don't include third party data
        want_third_party = data.get('third_party', False)

        repolist = {}
        if not repos:
            return repolist

        filters = []
        if modified_since:
            filters.append((self._filter_modified_since, [modified_since_dt]))

        filters.append((self._filter_third_party, [want_third_party]))

        repos = self.try_expand_by_regex(repos)

        repos = list(set(repos))

        repo_details = {}
        for label in repos:
            for repo_id in self.cache.repolabel2ids.get(label, []):
                repo_details[label] = self.cache.repo_detail[repo_id]
        filters.append((filter_item_if_exists, [repo_details]))

        actual_page_size = 0
        repo_page_to_process, pagination_response = paginate(repos,
                                                             page,
                                                             page_size,
                                                             filters=filters)
        for label in repo_page_to_process:
            cs_id = self.cache.label2content_set_id[label]
            for repo_id in self.cache.repolabel2ids.get(label, []):
                repo_detail = self.cache.repo_detail[repo_id]
                if not modified_since_dt or self._modified_since(
                        repo_detail, modified_since_dt):
                    if repo_id in self.cache.repo_id2cpe_ids:
                        cpe_ids = self.cache.repo_id2cpe_ids[repo_id]
                    else:
                        cpe_ids = self.cache.content_set_id2cpe_ids.get(
                            cs_id, [])
                    repolist.setdefault(label, []).append({
                        "label": label,
                        "name": repo_detail[REPO_NAME],
                        "url": repo_detail[REPO_URL],
                        "basearch": none2empty(repo_detail[REPO_BASEARCH]),
                        "releasever": none2empty(repo_detail[REPO_RELEASEVER]),
                        "product": repo_detail[REPO_PRODUCT],
                        "revision": format_datetime(repo_detail[REPO_REVISION]),
                        "cpes": [self.cache.cpe_id2label[cpe_id] for cpe_id in cpe_ids],
                        "third_party": repo_detail[REPO_THIRD_PARTY]
                    })
            actual_page_size += len(repolist[label])

        response = {
            'repository_list': repolist,
            'last_change': format_datetime(self.cache.dbchange['last_change'])
        }

        pagination_response['page_size'] = actual_page_size
        response.update(pagination_response)

        return response
Example #5
    def test_page_number(self):
        """Test pagination."""
        __, page_info = utils.paginate([], 2, 5)
        assert page_info["page"] == 2
        assert page_info["page_size"] == 0
Example #6
    def test_negative_page_number(self):
        """Test pagination - page=-1 page_size=0."""
        __, page_info = utils.paginate([], -1, 0)
        assert page_info["page"] == utils.DEFAULT_PAGE
        assert page_info["page_size"] == 0
Example #7
    def test_none_page_number(self):
        """Test pagination - page=page_size=None."""
        __, page_info = utils.paginate([], None, None)
        assert page_info["page"] == utils.DEFAULT_PAGE
        assert page_info["page_size"] == 0
Example #8
    def process_list(self, api_version, data):  # pylint: disable=unused-argument
        """
        Returns details for a given set of errata.

        :param data: data obtained from the API; we're interested in data["errata_list"]

        :returns: dictionary with detailed information for the given errata list
        """
        modified_since = data.get("modified_since", None)
        modified_since_dt = parse_datetime(modified_since)
        third_party = data.get("third_party", False)
        errata_to_process = data.get("errata_list", None)
        page = data.get("page", None)
        page_size = data.get("page_size", None)
        errata_type = data.get("type", None)
        severity = data.get("severity", [])

        response = {"errata_list": {},
                    "last_change": format_datetime(self.cache.dbchange["last_change"])}
        filters = [(filter_item_if_exists, [self.cache.errata_detail]),
                   (self._filter_third_party, [third_party])]
        if modified_since:
            # if we have information about modified/published dates and the request
            # includes "modified_since", compare the dates
            filters.append((self._filter_modified_since, [modified_since_dt]))
        if errata_type:
            errata_type = [t.lower() for t in set(errata_type)] \
                if isinstance(errata_type, list) else [errata_type.lower()]
            response["type"] = errata_type
            filters.append((self._filter_errata_by_prop, ["type", errata_type]))

        if severity is None or len(severity) != 0:
            severity = self._prepare_severity(severity)
            response["severity"] = severity
            filters.append((self._filter_errata_by_prop, ["severity", severity]))

        if not errata_to_process:
            return response

        errata_to_process = self.try_expand_by_regex(errata_to_process)

        errata_list = {}
        errata_page_to_process, pagination_response = paginate(errata_to_process, page, page_size, filters=filters)
        for errata in errata_page_to_process:
            errata_detail = self.cache.errata_detail.get(errata, None)
            if not errata_detail:
                continue

            bin_pkg_list, src_pkg_list = pkgidlist2packages(self.cache, errata_detail[ERRATA_PKGIDS])
            releasevers = self._errata_releasevers(errata_detail[ERRATA_ID])

            if errata_detail[ERRATA_MODULE]:
                for index, module_update in enumerate(errata_detail[ERRATA_MODULE]):
                    if all(str(elem).isdigit() for elem in errata_detail[ERRATA_MODULE][index]["package_list"]):
                        module_pkg_list, module_src_pkg_list = pkgidlist2packages(
                            self.cache, module_update["package_list"])
                        errata_detail[ERRATA_MODULE][index]["package_list"] = module_pkg_list
                        errata_detail[ERRATA_MODULE][index]["source_package_list"] = module_src_pkg_list

            errata_list[errata] = {
                "synopsis": none2empty(errata_detail[ERRATA_SYNOPSIS]),
                "summary": none2empty(errata_detail[ERRATA_SUMMARY]),
                "type": none2empty(errata_detail[ERRATA_TYPE]),
                "severity": errata_detail[ERRATA_SEVERITY],
                "description": none2empty(errata_detail[ERRATA_DESCRIPTION]),
                "solution": none2empty(errata_detail[ERRATA_SOLUTION]),
                "issued": none2empty(format_datetime(errata_detail[ERRATA_ISSUED])),
                "updated": none2empty(format_datetime(errata_detail[ERRATA_UPDATED])),
                "cve_list": errata_detail[ERRATA_CVE],
                "package_list": bin_pkg_list,
                "source_package_list": src_pkg_list,
                "bugzilla_list": errata_detail[ERRATA_BUGZILLA],
                "reference_list": errata_detail[ERRATA_REFERENCE],
                "modules_list": errata_detail[ERRATA_MODULE],
                "url": none2empty(errata_detail[ERRATA_URL]),
                "third_party": errata_detail[ERRATA_THIRD_PARTY],
                "requires_reboot": errata_detail[ERRATA_REQUIRES_REBOOT],
                "release_versions": releasevers,
            }
        response["errata_list"] = errata_list
        response.update(pagination_response)
        return response
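
Throughout these handlers, filters are handed to paginate() as (function, extra_args) pairs, and each function receives the whole candidate list plus its extra arguments. A sketch of that calling convention with a plausible reading of filter_item_if_exists (keep only items present in the given cache mapping); the implementation shown is an assumption, not the library's code:

def filter_item_if_exists(items, lookup):
    # Keep only the items that exist in the lookup mapping (e.g. cache.errata_detail).
    return [item for item in items if item in lookup]

known_errata = {"RHSA-2021:0001": ("detail",)}
filters = [(filter_item_if_exists, [known_errata])]

candidates = ["RHSA-2021:0001", "RHSA-2099:9999"]
for filter_method, filter_args in filters:
    candidates = filter_method(candidates, *filter_args)
# candidates is now ["RHSA-2021:0001"]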