def updated_advisories(self):
    """Return batched advisories for new upstream data.

    Skips all fetching/parsing work when the upstream ETag is
    unchanged, returning an empty list instead.
    """
    if not create_etag(data_src=self, url=self.url, etag_key="ETag"):
        return []
    response = self.fetch()
    return self.batch_advisories(self.to_advisories(response))
def updated_advisories(self):
    """Fetch, parse and batch advisories when the upstream data changed.

    When the ETag is unchanged no request is made and an empty batch
    is returned.
    """
    collected = []
    if create_etag(data_src=self, url=self.url, etag_key="ETag"):
        self.set_api()
        raw = requests.get(self.url).content
        collected += self.to_advisories(raw)
    return self.batch_advisories(collected)
def test_create_etag(self):
    """First call records a new (url, etag) pair; a repeat call is a no-op."""
    assert self.data_source.config.etags == {}

    fake_response = MagicMock()
    fake_response.headers = {"ETag": "0x1234"}
    with patch("vulnerabilities.helpers.requests.head", return_value=fake_response):
        created = create_etag(
            data_src=self.data_source, url="https://example.org", etag_key="ETag"
        )
        assert created is True
        assert self.data_source.config.etags == {"https://example.org": "0x1234"}

        # Same url + etag again: nothing new is created.
        created_again = create_etag(
            data_src=self.data_source, url="https://example.org", etag_key="ETag"
        )
        assert created_again is False
def updated_advisories(self):
    """Collect and batch advisories from every backport URL with new content."""
    collected = []
    for backport_url in self.get_all_urls_of_backports(self.config.url):
        # An unchanged ETag means this file was already processed; skip it.
        if create_etag(data_src=self, url=backport_url, etag_key="ETag"):
            collected.extend(self.process_file(self._fetch_yaml(backport_url)))
    return self.batch_advisories(collected)
def fetch_pages(self):
    """Yield the raw content of each Tomcat security page whose ETag changed.

    One page exists per major version; majors are derived from the first
    character of every known org.apache.tomcat:tomcat version.
    """
    majors = {
        version[0] for version in self.version_api.get("org.apache.tomcat:tomcat")
    }
    for major in majors:
        page_url = self.base_url.format(major)
        if not create_etag(self, page_url, "ETag"):
            continue
        yield requests.get(page_url).content
def updated_advisories(self) -> Set[Advisory]:
    """Return batched advisories when the upstream response changed.

    ETags act as fingerprints of web responses; we keep (url, etag)
    mappings in the DB. `create_etag` stores a new pair and returns
    False when the pair already exists, so unchanged responses are
    skipped to avoid duplicate work.
    """
    if not create_etag(data_src=self, url=self.url, etag_key="ETag"):
        return []
    raw = self.fetch()
    return self.batch_advisories(self.to_advisories(raw))
def updated_advisories(self):
    """Yield advisories for each yearly NVD feed that has new content.

    NVD JSON feeds start in 2002. ETags act as fingerprints of web
    responses; `create_etag` records a (url, etag) pair in the DB and
    returns False when the pair already exists, so unchanged feeds are
    skipped to avoid duplicate work.
    """
    this_year = date.today().year
    for year in range(2002, this_year + 1):
        feed_url = BASE_URL.format(year)
        if not create_etag(data_src=self, url=feed_url, etag_key="etag"):
            continue
        yield self.to_advisories(self.fetch(feed_url))
def _fetch(self):
    """Yield (purl-metadata, OVAL element tree) pairs for each configured
    Debian release whose OVAL definitions file changed upstream.

    This is a generator: when every file's ETag is unchanged it simply
    yields nothing, so iterating callers see an empty sequence. The old
    trailing ``return []`` was removed — inside a generator a return
    value only populates StopIteration.value and never reaches callers.
    """
    for release in self.config.releases:
        file_url = f"https://www.debian.org/security/oval/oval-definitions-{release}.xml"
        # Unchanged ETag means this file was already processed; skip it.
        if not create_etag(data_src=self, url=file_url, etag_key="ETag"):
            continue
        resp = requests.get(file_url).content
        yield (
            {"type": "deb", "namespace": "debian", "qualifiers": {"distro": release}},
            ET.ElementTree(ET.fromstring(resp.decode("utf-8"))),
        )
def _fetch(self):
    """Yield (purl-metadata, OVAL element tree) pairs for each configured
    Ubuntu release whose bzip2-compressed OVAL file changed upstream.

    This is a generator: when every file's ETag is unchanged it yields
    nothing and iterating callers see an empty sequence. The previous
    trailing ``return []`` (and the comment claiming it avoided a None
    return) was removed — inside a generator a return value only
    populates StopIteration.value and never reaches iterating callers.
    """
    for release in self.config.releases:
        file_url = (
            "https://people.canonical.com/~ubuntu-security/oval/"
            f"com.ubuntu.{release}.cve.oval.xml.bz2"
        )
        # Unchanged ETag means this file was already processed; skip it.
        if not create_etag(data_src=self, url=file_url, etag_key="ETag"):
            continue
        resp = requests.get(file_url)
        extracted = bz2.decompress(resp.content)
        yield (
            {"type": "deb", "namespace": "ubuntu"},
            ET.ElementTree(ET.fromstring(extracted.decode("utf-8"))),
        )
def updated_advisories(self):
    """Batch advisories parsed from the vulnerability DB dump, when the
    dump changed upstream (unchanged ETag yields an empty batch)."""
    collected = []
    if create_etag(data_src=self, url=self.config.db_url, etag_key="etag"):
        raw = fetch(self.config.db_url)
        collected.extend(self.to_advisories(raw))
    return self.batch_advisories(collected)